blob: 2c11794dbabfd84b85e7dfedd4284f7532349263 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/js-native-context-specialization.h"
6
7#include "src/accessors.h"
8#include "src/code-factory.h"
9#include "src/compilation-dependencies.h"
10#include "src/compiler/access-builder.h"
11#include "src/compiler/access-info.h"
12#include "src/compiler/js-graph.h"
13#include "src/compiler/js-operator.h"
14#include "src/compiler/linkage.h"
15#include "src/compiler/node-matchers.h"
16#include "src/field-index-inl.h"
17#include "src/isolate-inl.h"
18#include "src/objects-inl.h" // TODO(mstarzinger): Temporary cycle breaker!
19#include "src/type-cache.h"
20#include "src/type-feedback-vector.h"
21
22namespace v8 {
23namespace internal {
24namespace compiler {
25
// Constructor: stores the graph/editor/flags plumbing used by the reducer.
// {native_context} is the (possibly empty) native context to specialize
// against, {dependencies} records compilation dependencies taken during
// reduction, and {zone} backs the temporary ZoneVectors allocated below.
// The global TypeCache singleton is cached up front via TypeCache::Get().
JSNativeContextSpecialization::JSNativeContextSpecialization(
    Editor* editor, JSGraph* jsgraph, Flags flags,
    MaybeHandle<Context> native_context, CompilationDependencies* dependencies,
    Zone* zone)
    : AdvancedReducer(editor),
      jsgraph_(jsgraph),
      flags_(flags),
      native_context_(native_context),
      dependencies_(dependencies),
      zone_(zone),
      type_cache_(TypeCache::Get()) {}
37
38
39Reduction JSNativeContextSpecialization::Reduce(Node* node) {
40 switch (node->opcode()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +010041 case IrOpcode::kJSLoadContext:
42 return ReduceJSLoadContext(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000043 case IrOpcode::kJSLoadNamed:
44 return ReduceJSLoadNamed(node);
45 case IrOpcode::kJSStoreNamed:
46 return ReduceJSStoreNamed(node);
47 case IrOpcode::kJSLoadProperty:
48 return ReduceJSLoadProperty(node);
49 case IrOpcode::kJSStoreProperty:
50 return ReduceJSStoreProperty(node);
51 default:
52 break;
53 }
54 return NoChange();
55}
56
Ben Murdoch097c5b22016-05-18 11:27:45 +010057Reduction JSNativeContextSpecialization::ReduceJSLoadContext(Node* node) {
58 DCHECK_EQ(IrOpcode::kJSLoadContext, node->opcode());
59 ContextAccess const& access = ContextAccessOf(node->op());
60 Handle<Context> native_context;
61 // Specialize JSLoadContext(NATIVE_CONTEXT_INDEX) to the known native
62 // context (if any), so we can constant-fold those fields, which is
63 // safe, since the NATIVE_CONTEXT_INDEX slot is always immutable.
64 if (access.index() == Context::NATIVE_CONTEXT_INDEX &&
65 GetNativeContext(node).ToHandle(&native_context)) {
66 Node* value = jsgraph()->HeapConstant(native_context);
67 ReplaceWithValue(node, value);
68 return Replace(value);
69 }
70 return NoChange();
71}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000072
// Lowers a named property access (load or store) based on {receiver_maps}
// gathered from type feedback. For each usable PropertyAccessInfo a guarded
// branch is emitted; all branches that pass their checks are joined with a
// Merge/Phi/EffectPhi at the bottom, while every failed check routes to one
// shared eager Deoptimize exit.
//
// {value} is the incoming value for stores (callers pass a Dead node for
// loads); {index} is the key input of a keyed access (may be nullptr) which
// must be proven equal to {name} before this named lowering is valid.
Reduction JSNativeContextSpecialization::ReduceNamedAccess(
    Node* node, Node* value, MapHandleList const& receiver_maps,
    Handle<Name> name, AccessMode access_mode, LanguageMode language_mode,
    Node* index) {
  DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
         node->opcode() == IrOpcode::kJSStoreNamed ||
         node->opcode() == IrOpcode::kJSLoadProperty ||
         node->opcode() == IrOpcode::kJSStoreProperty);
  Node* receiver = NodeProperties::GetValueInput(node, 0);
  Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Not much we can do if deoptimization support is disabled.
  if (!(flags() & kDeoptimizationEnabled)) return NoChange();

  // Retrieve the native context from the given {node}.
  Handle<Context> native_context;
  if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();

  // Compute property access infos for the receiver maps.
  AccessInfoFactory access_info_factory(dependencies(), native_context,
                                        graph()->zone());
  ZoneVector<PropertyAccessInfo> access_infos(zone());
  if (!access_info_factory.ComputePropertyAccessInfos(
          receiver_maps, name, access_mode, &access_infos)) {
    return NoChange();
  }

  // Nothing to do if we have no non-deprecated maps.
  if (access_infos.empty()) return NoChange();

  // The final states for every polymorphic branch. We join them with
  // Merge+Phi+EffectPhi at the bottom.
  ZoneVector<Node*> values(zone());
  ZoneVector<Node*> effects(zone());
  ZoneVector<Node*> controls(zone());

  // The list of "exiting" controls, which currently go to a single deoptimize.
  // TODO(bmeurer): Consider using an IC as fallback.
  Node* const exit_effect = effect;
  ZoneVector<Node*> exit_controls(zone());

  // Ensure that {index} matches the specified {name} (if {index} is given).
  if (index != nullptr) {
    Node* check = graph()->NewNode(simplified()->ReferenceEqual(Type::Name()),
                                   index, jsgraph()->HeapConstant(name));
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
    exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    control = graph()->NewNode(common()->IfTrue(), branch);
  }

  // Ensure that {receiver} is a heap object. The Smi path is kept alive
  // (not immediately deoptimized) because a Number receiver_type below may
  // legitimately consume it.
  Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
  Node* branch = graph()->NewNode(common()->Branch(), check, control);
  control = graph()->NewNode(common()->IfFalse(), branch);
  Node* receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
  Node* receiverissmi_effect = effect;

  // Load the {receiver} map. The resulting effect is the dominating effect for
  // all (polymorphic) branches.
  Node* receiver_map = effect =
      graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
                       receiver, effect, control);

  // Generate code for the various different property access patterns.
  Node* fallthrough_control = control;
  for (PropertyAccessInfo const& access_info : access_infos) {
    Node* this_value = value;
    Node* this_receiver = receiver;
    Node* this_effect = effect;
    Node* this_control;

    // Perform map check on {receiver}.
    Type* receiver_type = access_info.receiver_type();
    if (receiver_type->Is(Type::String())) {
      // Emit an instance type check for strings.
      Node* receiver_instance_type = this_effect = graph()->NewNode(
          simplified()->LoadField(AccessBuilder::ForMapInstanceType()),
          receiver_map, this_effect, fallthrough_control);
      Node* check =
          graph()->NewNode(machine()->Uint32LessThan(), receiver_instance_type,
                           jsgraph()->Uint32Constant(FIRST_NONSTRING_TYPE));
      Node* branch =
          graph()->NewNode(common()->Branch(), check, fallthrough_control);
      fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
      this_control = graph()->NewNode(common()->IfTrue(), branch);
    } else {
      // Emit a (sequence of) map checks for other {receiver}s.
      ZoneVector<Node*> this_controls(zone());
      ZoneVector<Node*> this_effects(zone());
      for (auto i = access_info.receiver_type()->Classes(); !i.Done();
           i.Advance()) {
        Handle<Map> map = i.Current();
        Node* check =
            graph()->NewNode(simplified()->ReferenceEqual(Type::Internal()),
                             receiver_map, jsgraph()->Constant(map));
        Node* branch =
            graph()->NewNode(common()->Branch(), check, fallthrough_control);
        fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
        this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
        this_effects.push_back(this_effect);
      }

      // The Number case requires special treatment to also deal with Smis.
      if (receiver_type->Is(Type::Number())) {
        // Join this check with the "receiver is smi" check above, and mark the
        // "receiver is smi" check as "consumed" so that we don't deoptimize if
        // the {receiver} is actually a Smi.
        if (receiverissmi_control != nullptr) {
          this_controls.push_back(receiverissmi_control);
          this_effects.push_back(receiverissmi_effect);
          receiverissmi_control = receiverissmi_effect = nullptr;
        }
      }

      // Create dominating Merge+EffectPhi for this {receiver} type.
      int const this_control_count = static_cast<int>(this_controls.size());
      this_control =
          (this_control_count == 1)
              ? this_controls.front()
              : graph()->NewNode(common()->Merge(this_control_count),
                                 this_control_count, &this_controls.front());
      // Note: the merge control is appended to this_effects as the last
      // EffectPhi input; in the single-control case the front() effect is
      // used directly and the appended control is ignored.
      this_effects.push_back(this_control);
      int const this_effect_count = static_cast<int>(this_effects.size());
      this_effect =
          (this_control_count == 1)
              ? this_effects.front()
              : graph()->NewNode(common()->EffectPhi(this_control_count),
                                 this_effect_count, &this_effects.front());
    }

    // Determine actual holder and perform prototype chain checks.
    Handle<JSObject> holder;
    if (access_info.holder().ToHandle(&holder)) {
      AssumePrototypesStable(receiver_type, native_context, holder);
    }

    // Generate the actual property access.
    if (access_info.IsNotFound()) {
      // Property not found: loads yield undefined (except in strong mode,
      // which must throw and is not lowered here yet).
      DCHECK_EQ(AccessMode::kLoad, access_mode);
      if (is_strong(language_mode)) {
        // TODO(bmeurer/mstarzinger): Add support for lowering inside try
        // blocks rewiring the IfException edge to a runtime call/throw.
        exit_controls.push_back(this_control);
        continue;
      } else {
        this_value = jsgraph()->UndefinedConstant();
      }
    } else if (access_info.IsDataConstant()) {
      this_value = jsgraph()->Constant(access_info.constant());
      if (access_mode == AccessMode::kStore) {
        // Storing to a constant field only succeeds when storing the same
        // value; otherwise deoptimize.
        Node* check = graph()->NewNode(
            simplified()->ReferenceEqual(Type::Tagged()), value, this_value);
        Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
        exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
        this_control = graph()->NewNode(common()->IfTrue(), branch);
      }
    } else {
      DCHECK(access_info.IsDataField());
      FieldIndex const field_index = access_info.field_index();
      FieldCheck const field_check = access_info.field_check();
      Type* const field_type = access_info.field_type();
      switch (field_check) {
        case FieldCheck::kNone:
          break;
        case FieldCheck::kJSArrayBufferViewBufferNotNeutered: {
          // Deoptimize if the view's backing ArrayBuffer was neutered.
          Node* this_buffer = this_effect =
              graph()->NewNode(simplified()->LoadField(
                                   AccessBuilder::ForJSArrayBufferViewBuffer()),
                               this_receiver, this_effect, this_control);
          Node* this_buffer_bit_field = this_effect =
              graph()->NewNode(simplified()->LoadField(
                                   AccessBuilder::ForJSArrayBufferBitField()),
                               this_buffer, this_effect, this_control);
          Node* check = graph()->NewNode(
              machine()->Word32Equal(),
              graph()->NewNode(machine()->Word32And(), this_buffer_bit_field,
                               jsgraph()->Int32Constant(
                                   1 << JSArrayBuffer::WasNeutered::kShift)),
              jsgraph()->Int32Constant(0));
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_control = graph()->NewNode(common()->IfFalse(), branch);
          break;
        }
      }
      if (access_mode == AccessMode::kLoad &&
          access_info.holder().ToHandle(&holder)) {
        this_receiver = jsgraph()->Constant(holder);
      }
      Node* this_storage = this_receiver;
      if (!field_index.is_inobject()) {
        // Out-of-object fields live in the properties backing store.
        this_storage = this_effect = graph()->NewNode(
            simplified()->LoadField(AccessBuilder::ForJSObjectProperties()),
            this_storage, this_effect, this_control);
      }
      FieldAccess field_access = {kTaggedBase, field_index.offset(), name,
                                  field_type, MachineType::AnyTagged()};
      if (access_mode == AccessMode::kLoad) {
        if (field_type->Is(Type::UntaggedFloat64())) {
          // Double fields may be boxed in a (Mutable)HeapNumber unless the
          // unboxed-double-fields mode applies; load the box first then the
          // raw float64 payload.
          if (!field_index.is_inobject() || field_index.is_hidden_field() ||
              !FLAG_unbox_double_fields) {
            this_storage = this_effect =
                graph()->NewNode(simplified()->LoadField(field_access),
                                 this_storage, this_effect, this_control);
            field_access.offset = HeapNumber::kValueOffset;
            field_access.name = MaybeHandle<Name>();
          }
          field_access.machine_type = MachineType::Float64();
        }
        this_value = this_effect =
            graph()->NewNode(simplified()->LoadField(field_access),
                             this_storage, this_effect, this_control);
      } else {
        DCHECK_EQ(AccessMode::kStore, access_mode);
        if (field_type->Is(Type::UntaggedFloat64())) {
          // Stored value must be a Number; deoptimize otherwise.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                          check, this_control);
          exit_controls.push_back(
              graph()->NewNode(common()->IfFalse(), branch));
          this_control = graph()->NewNode(common()->IfTrue(), branch);
          this_value = graph()->NewNode(common()->Guard(Type::Number()),
                                        this_value, this_control);

          if (!field_index.is_inobject() || field_index.is_hidden_field() ||
              !FLAG_unbox_double_fields) {
            if (access_info.HasTransitionMap()) {
              // Allocate a MutableHeapNumber for the new property.
              Callable callable =
                  CodeFactory::AllocateMutableHeapNumber(isolate());
              CallDescriptor* desc = Linkage::GetStubCallDescriptor(
                  isolate(), jsgraph()->zone(), callable.descriptor(), 0,
                  CallDescriptor::kNoFlags, Operator::kNoThrow);
              Node* this_box = this_effect = graph()->NewNode(
                  common()->Call(desc),
                  jsgraph()->HeapConstant(callable.code()),
                  jsgraph()->NoContextConstant(), this_effect, this_control);
              this_effect = graph()->NewNode(
                  simplified()->StoreField(AccessBuilder::ForHeapNumberValue()),
                  this_box, this_value, this_effect, this_control);
              this_value = this_box;

              field_access.type = Type::TaggedPointer();
            } else {
              // We just store directly to the MutableHeapNumber.
              this_storage = this_effect =
                  graph()->NewNode(simplified()->LoadField(field_access),
                                   this_storage, this_effect, this_control);
              field_access.offset = HeapNumber::kValueOffset;
              field_access.name = MaybeHandle<Name>();
              field_access.machine_type = MachineType::Float64();
            }
          } else {
            // Unboxed double field, we store directly to the field.
            field_access.machine_type = MachineType::Float64();
          }
        } else if (field_type->Is(Type::TaggedSigned())) {
          // Stored value must be a Smi; deoptimize otherwise.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                          check, this_control);
          exit_controls.push_back(
              graph()->NewNode(common()->IfFalse(), branch));
          this_control = graph()->NewNode(common()->IfTrue(), branch);
          this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
                                        this_value, this_control);
        } else if (field_type->Is(Type::TaggedPointer())) {
          // Stored value must be a heap object; deoptimize on Smi.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_control = graph()->NewNode(common()->IfFalse(), branch);
          if (field_type->NumClasses() > 0) {
            // Emit a (sequence of) map checks for the value.
            ZoneVector<Node*> this_controls(zone());
            Node* this_value_map = this_effect = graph()->NewNode(
                simplified()->LoadField(AccessBuilder::ForMap()), this_value,
                this_effect, this_control);
            for (auto i = field_type->Classes(); !i.Done(); i.Advance()) {
              Handle<Map> field_map(i.Current());
              check = graph()->NewNode(
                  simplified()->ReferenceEqual(Type::Internal()),
                  this_value_map, jsgraph()->Constant(field_map));
              branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
              this_control = graph()->NewNode(common()->IfFalse(), branch);
              this_controls.push_back(
                  graph()->NewNode(common()->IfTrue(), branch));
            }
            exit_controls.push_back(this_control);
            int const this_control_count =
                static_cast<int>(this_controls.size());
            this_control =
                (this_control_count == 1)
                    ? this_controls.front()
                    : graph()->NewNode(common()->Merge(this_control_count),
                                       this_control_count,
                                       &this_controls.front());
          }
        } else {
          DCHECK(field_type->Is(Type::Tagged()));
        }
        Handle<Map> transition_map;
        if (access_info.transition_map().ToHandle(&transition_map)) {
          // Transitioning store: map change + field store form one
          // atomic region for the store elimination/allocation folding.
          this_effect = graph()->NewNode(common()->BeginRegion(), this_effect);
          this_effect = graph()->NewNode(
              simplified()->StoreField(AccessBuilder::ForMap()), this_receiver,
              jsgraph()->Constant(transition_map), this_effect, this_control);
        }
        this_effect = graph()->NewNode(simplified()->StoreField(field_access),
                                       this_storage, this_value, this_effect,
                                       this_control);
        if (access_info.HasTransitionMap()) {
          this_effect =
              graph()->NewNode(common()->FinishRegion(),
                               jsgraph()->UndefinedConstant(), this_effect);
        }
      }
    }

    // Remember the final state for this property access.
    values.push_back(this_value);
    effects.push_back(this_effect);
    controls.push_back(this_control);
  }

  // Collect the fallthrough control as final "exit" control.
  if (fallthrough_control != control) {
    // Mark the last fallthrough branch as deferred.
    MarkAsDeferred(fallthrough_control);
  }
  exit_controls.push_back(fallthrough_control);

  // Also collect the "receiver is smi" control if we didn't handle the case of
  // Number primitives in the polymorphic branches above.
  if (receiverissmi_control != nullptr) {
    // Mark the "receiver is smi" case as deferred.
    MarkAsDeferred(receiverissmi_control);
    DCHECK_EQ(exit_effect, receiverissmi_effect);
    exit_controls.push_back(receiverissmi_control);
  }

  // Generate the single "exit" point, where we get if either all map/instance
  // type checks failed, or one of the assumptions inside one of the cases
  // fails (i.e. failing prototype chain check).
  // TODO(bmeurer): Consider falling back to IC here if deoptimization is
  // disabled.
  int const exit_control_count = static_cast<int>(exit_controls.size());
  Node* exit_control =
      (exit_control_count == 1)
          ? exit_controls.front()
          : graph()->NewNode(common()->Merge(exit_control_count),
                             exit_control_count, &exit_controls.front());
  Node* deoptimize =
      graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                       frame_state, exit_effect, exit_control);
  // TODO(bmeurer): This should be on the AdvancedReducer somehow.
  NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
  Revisit(graph()->end());

  // Generate the final merge point for all (polymorphic) branches.
  int const control_count = static_cast<int>(controls.size());
  if (control_count == 0) {
    value = effect = control = jsgraph()->Dead();
  } else if (control_count == 1) {
    value = values.front();
    effect = effects.front();
    control = controls.front();
  } else {
    control = graph()->NewNode(common()->Merge(control_count), control_count,
                               &controls.front());
    values.push_back(control);
    value = graph()->NewNode(
        common()->Phi(MachineRepresentation::kTagged, control_count),
        control_count + 1, &values.front());
    effects.push_back(control);
    effect = graph()->NewNode(common()->EffectPhi(control_count),
                              control_count + 1, &effects.front());
  }
  ReplaceWithValue(node, value, effect, control);
  return Replace(value);
}
462
463
Ben Murdoch097c5b22016-05-18 11:27:45 +0100464Reduction JSNativeContextSpecialization::ReduceNamedAccess(
465 Node* node, Node* value, FeedbackNexus const& nexus, Handle<Name> name,
466 AccessMode access_mode, LanguageMode language_mode) {
467 DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
468 node->opcode() == IrOpcode::kJSStoreNamed);
469
470 // Check if the {nexus} reports type feedback for the IC.
471 if (nexus.IsUninitialized()) {
472 if ((flags() & kDeoptimizationEnabled) &&
473 (flags() & kBailoutOnUninitialized)) {
474 // TODO(turbofan): Implement all eager bailout points correctly in
475 // the graph builder.
476 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
477 if (!OpParameter<FrameStateInfo>(frame_state).bailout_id().IsNone()) {
478 return ReduceSoftDeoptimize(node);
479 }
480 }
481 return NoChange();
482 }
483
484 // Extract receiver maps from the IC using the {nexus}.
485 MapHandleList receiver_maps;
486 if (nexus.ExtractMaps(&receiver_maps) == 0) return NoChange();
487 DCHECK_LT(0, receiver_maps.length());
488
489 // Try to lower the named access based on the {receiver_maps}.
490 return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
491 language_mode);
492}
493
494
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000495Reduction JSNativeContextSpecialization::ReduceJSLoadNamed(Node* node) {
496 DCHECK_EQ(IrOpcode::kJSLoadNamed, node->opcode());
497 NamedAccess const& p = NamedAccessOf(node->op());
498 Node* const value = jsgraph()->Dead();
499
500 // Extract receiver maps from the LOAD_IC using the LoadICNexus.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000501 if (!p.feedback().IsValid()) return NoChange();
502 LoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000503
504 // Try to lower the named access based on the {receiver_maps}.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100505 return ReduceNamedAccess(node, value, nexus, p.name(), AccessMode::kLoad,
506 p.language_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000507}
508
509
510Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
511 DCHECK_EQ(IrOpcode::kJSStoreNamed, node->opcode());
512 NamedAccess const& p = NamedAccessOf(node->op());
513 Node* const value = NodeProperties::GetValueInput(node, 1);
514
515 // Extract receiver maps from the STORE_IC using the StoreICNexus.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000516 if (!p.feedback().IsValid()) return NoChange();
517 StoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000518
519 // Try to lower the named access based on the {receiver_maps}.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100520 return ReduceNamedAccess(node, value, nexus, p.name(), AccessMode::kStore,
521 p.language_mode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000522}
523
524
525Reduction JSNativeContextSpecialization::ReduceElementAccess(
526 Node* node, Node* index, Node* value, MapHandleList const& receiver_maps,
527 AccessMode access_mode, LanguageMode language_mode,
528 KeyedAccessStoreMode store_mode) {
529 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
530 node->opcode() == IrOpcode::kJSStoreProperty);
531 Node* receiver = NodeProperties::GetValueInput(node, 0);
532 Node* context = NodeProperties::GetContextInput(node);
533 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
534 Node* effect = NodeProperties::GetEffectInput(node);
535 Node* control = NodeProperties::GetControlInput(node);
536
537 // Not much we can do if deoptimization support is disabled.
538 if (!(flags() & kDeoptimizationEnabled)) return NoChange();
539
540 // TODO(bmeurer): Add support for non-standard stores.
541 if (store_mode != STANDARD_STORE) return NoChange();
542
543 // Retrieve the native context from the given {node}.
544 Handle<Context> native_context;
545 if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();
546
547 // Compute element access infos for the receiver maps.
548 AccessInfoFactory access_info_factory(dependencies(), native_context,
549 graph()->zone());
550 ZoneVector<ElementAccessInfo> access_infos(zone());
551 if (!access_info_factory.ComputeElementAccessInfos(receiver_maps, access_mode,
552 &access_infos)) {
553 return NoChange();
554 }
555
556 // Nothing to do if we have no non-deprecated maps.
557 if (access_infos.empty()) return NoChange();
558
559 // The final states for every polymorphic branch. We join them with
560 // Merge+Phi+EffectPhi at the bottom.
561 ZoneVector<Node*> values(zone());
562 ZoneVector<Node*> effects(zone());
563 ZoneVector<Node*> controls(zone());
564
565 // The list of "exiting" controls, which currently go to a single deoptimize.
566 // TODO(bmeurer): Consider using an IC as fallback.
567 Node* const exit_effect = effect;
568 ZoneVector<Node*> exit_controls(zone());
569
570 // Ensure that {receiver} is a heap object.
571 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
572 Node* branch =
573 graph()->NewNode(common()->Branch(BranchHint::kFalse), check, control);
574 exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
575 control = graph()->NewNode(common()->IfFalse(), branch);
576
577 // Load the {receiver} map. The resulting effect is the dominating effect for
578 // all (polymorphic) branches.
579 Node* receiver_map = effect =
580 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
581 receiver, effect, control);
582
583 // Generate code for the various different element access patterns.
584 Node* fallthrough_control = control;
585 for (ElementAccessInfo const& access_info : access_infos) {
586 Node* this_receiver = receiver;
587 Node* this_value = value;
588 Node* this_index = index;
589 Node* this_effect;
590 Node* this_control;
591
592 // Perform map check on {receiver}.
593 Type* receiver_type = access_info.receiver_type();
594 bool receiver_is_jsarray = true;
595 {
596 ZoneVector<Node*> this_controls(zone());
597 ZoneVector<Node*> this_effects(zone());
598 for (auto i = access_info.receiver_type()->Classes(); !i.Done();
599 i.Advance()) {
600 Handle<Map> map = i.Current();
601 Node* check =
602 graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
603 receiver_map, jsgraph()->Constant(map));
604 Node* branch =
605 graph()->NewNode(common()->Branch(), check, fallthrough_control);
606 this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
607 this_effects.push_back(effect);
608 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
609 if (!map->IsJSArrayMap()) receiver_is_jsarray = false;
610 }
611
612 // Generate possible elements kind transitions.
613 for (auto transition : access_info.transitions()) {
614 Handle<Map> transition_source = transition.first;
615 Handle<Map> transition_target = transition.second;
616
617 // Check if {receiver} has the specified {transition_source} map.
618 Node* check = graph()->NewNode(
619 simplified()->ReferenceEqual(Type::Any()), receiver_map,
620 jsgraph()->HeapConstant(transition_source));
621 Node* branch =
622 graph()->NewNode(common()->Branch(), check, fallthrough_control);
623
624 // Migrate {receiver} from {transition_source} to {transition_target}.
625 Node* transition_control = graph()->NewNode(common()->IfTrue(), branch);
626 Node* transition_effect = effect;
627 if (IsSimpleMapChangeTransition(transition_source->elements_kind(),
628 transition_target->elements_kind())) {
629 // In-place migration, just store the {transition_target} map.
630 transition_effect = graph()->NewNode(
631 simplified()->StoreField(AccessBuilder::ForMap()), receiver,
632 jsgraph()->HeapConstant(transition_target), transition_effect,
633 transition_control);
634 } else {
635 // Instance migration, let the stub deal with the {receiver}.
636 TransitionElementsKindStub stub(isolate(),
637 transition_source->elements_kind(),
638 transition_target->elements_kind(),
639 transition_source->IsJSArrayMap());
640 CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
641 isolate(), graph()->zone(), stub.GetCallInterfaceDescriptor(), 0,
642 CallDescriptor::kNeedsFrameState, node->op()->properties());
643 transition_effect = graph()->NewNode(
644 common()->Call(desc), jsgraph()->HeapConstant(stub.GetCode()),
645 receiver, jsgraph()->HeapConstant(transition_target), context,
646 frame_state, transition_effect, transition_control);
647 }
648 this_controls.push_back(transition_control);
649 this_effects.push_back(transition_effect);
650
651 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
652 }
653
654 // Create single chokepoint for the control.
655 int const this_control_count = static_cast<int>(this_controls.size());
656 if (this_control_count == 1) {
657 this_control = this_controls.front();
658 this_effect = this_effects.front();
659 } else {
660 this_control =
661 graph()->NewNode(common()->Merge(this_control_count),
662 this_control_count, &this_controls.front());
663 this_effects.push_back(this_control);
664 this_effect =
665 graph()->NewNode(common()->EffectPhi(this_control_count),
666 this_control_count + 1, &this_effects.front());
667 }
668 }
669
670 // Certain stores need a prototype chain check because shape changes
671 // could allow callbacks on elements in the prototype chain that are
672 // not compatible with (monomorphic) keyed stores.
673 Handle<JSObject> holder;
674 if (access_info.holder().ToHandle(&holder)) {
675 AssumePrototypesStable(receiver_type, native_context, holder);
676 }
677
678 // Check that the {index} is actually a Number.
679 if (!NumberMatcher(this_index).HasValue()) {
680 Node* check =
681 graph()->NewNode(simplified()->ObjectIsNumber(), this_index);
682 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
683 check, this_control);
684 exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
685 this_control = graph()->NewNode(common()->IfTrue(), branch);
686 this_index = graph()->NewNode(common()->Guard(Type::Number()), this_index,
687 this_control);
688 }
689
690 // Convert the {index} to an unsigned32 value and check if the result is
691 // equal to the original {index}.
692 if (!NumberMatcher(this_index).IsInRange(0.0, kMaxUInt32)) {
693 Node* this_index32 =
694 graph()->NewNode(simplified()->NumberToUint32(), this_index);
695 Node* check = graph()->NewNode(simplified()->NumberEqual(), this_index32,
696 this_index);
697 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
698 check, this_control);
699 exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
700 this_control = graph()->NewNode(common()->IfTrue(), branch);
701 this_index = this_index32;
702 }
703
704 // TODO(bmeurer): We currently specialize based on elements kind. We should
705 // also be able to properly support strings and other JSObjects here.
706 ElementsKind elements_kind = access_info.elements_kind();
707
708 // Load the elements for the {receiver}.
709 Node* this_elements = this_effect = graph()->NewNode(
710 simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
711 this_receiver, this_effect, this_control);
712
713 // Don't try to store to a copy-on-write backing store.
714 if (access_mode == AccessMode::kStore &&
715 IsFastSmiOrObjectElementsKind(elements_kind)) {
716 Node* this_elements_map = this_effect =
717 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
718 this_elements, this_effect, this_control);
719 check = graph()->NewNode(
720 simplified()->ReferenceEqual(Type::Any()), this_elements_map,
721 jsgraph()->HeapConstant(factory()->fixed_array_map()));
722 branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
723 this_control);
724 exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
725 this_control = graph()->NewNode(common()->IfTrue(), branch);
726 }
727
728 // Load the length of the {receiver}.
729 Node* this_length = this_effect =
730 receiver_is_jsarray
731 ? graph()->NewNode(
732 simplified()->LoadField(
733 AccessBuilder::ForJSArrayLength(elements_kind)),
734 this_receiver, this_effect, this_control)
735 : graph()->NewNode(
736 simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
737 this_elements, this_effect, this_control);
738
739 // Check that the {index} is in the valid range for the {receiver}.
740 Node* check = graph()->NewNode(simplified()->NumberLessThan(), this_index,
741 this_length);
742 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
743 this_control);
744 exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
745 this_control = graph()->NewNode(common()->IfTrue(), branch);
746
747 // Compute the element access.
748 Type* element_type = Type::Any();
749 MachineType element_machine_type = MachineType::AnyTagged();
750 if (IsFastDoubleElementsKind(elements_kind)) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100751 element_type = Type::Number();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000752 element_machine_type = MachineType::Float64();
753 } else if (IsFastSmiElementsKind(elements_kind)) {
754 element_type = type_cache_.kSmi;
755 }
756 ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
757 element_type, element_machine_type};
758
759 // Access the actual element.
760 // TODO(bmeurer): Refactor this into separate methods or even a separate
761 // class that deals with the elements access.
762 if (access_mode == AccessMode::kLoad) {
763 // Compute the real element access type, which includes the hole in case
764 // of holey backing stores.
765 if (elements_kind == FAST_HOLEY_ELEMENTS ||
766 elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
767 element_access.type = Type::Union(
768 element_type,
769 Type::Constant(factory()->the_hole_value(), graph()->zone()),
770 graph()->zone());
771 }
772 // Perform the actual backing store access.
773 this_value = this_effect = graph()->NewNode(
774 simplified()->LoadElement(element_access), this_elements, this_index,
775 this_effect, this_control);
776 // Handle loading from holey backing stores correctly, by either mapping
777 // the hole to undefined if possible, or deoptimizing otherwise.
778 if (elements_kind == FAST_HOLEY_ELEMENTS ||
779 elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
780 // Perform the hole check on the result.
781 Node* check =
782 graph()->NewNode(simplified()->ReferenceEqual(element_access.type),
783 this_value, jsgraph()->TheHoleConstant());
784 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
785 check, this_control);
786 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
787 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
788 // Check if we are allowed to turn the hole into undefined.
789 Type* initial_holey_array_type = Type::Class(
790 handle(isolate()->get_initial_js_array_map(elements_kind)),
791 graph()->zone());
792 if (receiver_type->NowIs(initial_holey_array_type) &&
793 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
794 // Add a code dependency on the array protector cell.
795 AssumePrototypesStable(receiver_type, native_context,
796 isolate()->initial_object_prototype());
797 dependencies()->AssumePropertyCell(factory()->array_protector());
798 // Turn the hole into undefined.
799 this_control =
800 graph()->NewNode(common()->Merge(2), if_true, if_false);
801 this_value = graph()->NewNode(
802 common()->Phi(MachineRepresentation::kTagged, 2),
803 jsgraph()->UndefinedConstant(), this_value, this_control);
804 element_type =
805 Type::Union(element_type, Type::Undefined(), graph()->zone());
806 } else {
807 // Deoptimize in case of the hole.
808 exit_controls.push_back(if_true);
809 this_control = if_false;
810 }
811 // Rename the result to represent the actual type (not polluted by the
812 // hole).
813 this_value = graph()->NewNode(common()->Guard(element_type), this_value,
814 this_control);
815 } else if (elements_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
816 // Perform the hole check on the result.
817 Node* check =
818 graph()->NewNode(simplified()->NumberIsHoleNaN(), this_value);
819 // Check if we are allowed to return the hole directly.
820 Type* initial_holey_array_type = Type::Class(
821 handle(isolate()->get_initial_js_array_map(elements_kind)),
822 graph()->zone());
823 if (receiver_type->NowIs(initial_holey_array_type) &&
824 isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
825 // Add a code dependency on the array protector cell.
826 AssumePrototypesStable(receiver_type, native_context,
827 isolate()->initial_object_prototype());
828 dependencies()->AssumePropertyCell(factory()->array_protector());
829 // Turn the hole into undefined.
830 this_value = graph()->NewNode(
831 common()->Select(MachineRepresentation::kTagged,
832 BranchHint::kFalse),
833 check, jsgraph()->UndefinedConstant(), this_value);
834 } else {
835 // Deoptimize in case of the hole.
836 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
837 check, this_control);
838 this_control = graph()->NewNode(common()->IfFalse(), branch);
839 exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
840 }
841 }
842 } else {
843 DCHECK_EQ(AccessMode::kStore, access_mode);
844 if (IsFastSmiElementsKind(elements_kind)) {
845 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
846 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
847 check, this_control);
848 exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
849 this_control = graph()->NewNode(common()->IfTrue(), branch);
850 this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
851 this_value, this_control);
852 } else if (IsFastDoubleElementsKind(elements_kind)) {
853 Node* check =
854 graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
855 Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
856 check, this_control);
857 exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
858 this_control = graph()->NewNode(common()->IfTrue(), branch);
859 this_value = graph()->NewNode(common()->Guard(Type::Number()),
860 this_value, this_control);
861 }
862 this_effect = graph()->NewNode(simplified()->StoreElement(element_access),
863 this_elements, this_index, this_value,
864 this_effect, this_control);
865 }
866
867 // Remember the final state for this element access.
868 values.push_back(this_value);
869 effects.push_back(this_effect);
870 controls.push_back(this_control);
871 }
872
873 // Collect the fallthrough control as final "exit" control.
874 if (fallthrough_control != control) {
875 // Mark the last fallthrough branch as deferred.
876 MarkAsDeferred(fallthrough_control);
877 }
878 exit_controls.push_back(fallthrough_control);
879
880 // Generate the single "exit" point, where we get if either all map/instance
881 // type checks failed, or one of the assumptions inside one of the cases
  // fails (i.e. failing prototype chain check).
883 // TODO(bmeurer): Consider falling back to IC here if deoptimization is
884 // disabled.
885 int const exit_control_count = static_cast<int>(exit_controls.size());
886 Node* exit_control =
887 (exit_control_count == 1)
888 ? exit_controls.front()
889 : graph()->NewNode(common()->Merge(exit_control_count),
890 exit_control_count, &exit_controls.front());
891 Node* deoptimize =
892 graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
893 frame_state, exit_effect, exit_control);
894 // TODO(bmeurer): This should be on the AdvancedReducer somehow.
895 NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100896 Revisit(graph()->end());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000897
898 // Generate the final merge point for all (polymorphic) branches.
899 int const control_count = static_cast<int>(controls.size());
900 if (control_count == 0) {
901 value = effect = control = jsgraph()->Dead();
902 } else if (control_count == 1) {
903 value = values.front();
904 effect = effects.front();
905 control = controls.front();
906 } else {
907 control = graph()->NewNode(common()->Merge(control_count), control_count,
908 &controls.front());
909 values.push_back(control);
910 value = graph()->NewNode(
911 common()->Phi(MachineRepresentation::kTagged, control_count),
912 control_count + 1, &values.front());
913 effects.push_back(control);
914 effect = graph()->NewNode(common()->EffectPhi(control_count),
915 control_count + 1, &effects.front());
916 }
917 ReplaceWithValue(node, value, effect, control);
918 return Replace(value);
919}
920
921
922Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
923 Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
924 AccessMode access_mode, LanguageMode language_mode,
925 KeyedAccessStoreMode store_mode) {
926 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
927 node->opcode() == IrOpcode::kJSStoreProperty);
928
Ben Murdoch097c5b22016-05-18 11:27:45 +0100929 // Check if the {nexus} reports type feedback for the IC.
930 if (nexus.IsUninitialized()) {
931 if ((flags() & kDeoptimizationEnabled) &&
932 (flags() & kBailoutOnUninitialized)) {
933 // TODO(turbofan): Implement all eager bailout points correctly in
934 // the graph builder.
935 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
936 if (!OpParameter<FrameStateInfo>(frame_state).bailout_id().IsNone()) {
937 return ReduceSoftDeoptimize(node);
938 }
939 }
940 return NoChange();
941 }
942
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000943 // Extract receiver maps from the {nexus}.
944 MapHandleList receiver_maps;
945 if (nexus.ExtractMaps(&receiver_maps) == 0) return NoChange();
946 DCHECK_LT(0, receiver_maps.length());
947
948 // Optimize access for constant {index}.
949 HeapObjectMatcher mindex(index);
950 if (mindex.HasValue() && mindex.Value()->IsPrimitive()) {
951 // Keyed access requires a ToPropertyKey on the {index} first before
952 // looking up the property on the object (see ES6 section 12.3.2.1).
953 // We can only do this for non-observable ToPropertyKey invocations,
954 // so we limit the constant indices to primitives at this point.
955 Handle<Name> name;
956 if (Object::ToName(isolate(), mindex.Value()).ToHandle(&name)) {
957 uint32_t array_index;
958 if (name->AsArrayIndex(&array_index)) {
959 // Use the constant array index.
960 index = jsgraph()->Constant(static_cast<double>(array_index));
961 } else {
962 name = factory()->InternalizeName(name);
963 return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
964 language_mode);
965 }
966 }
967 }
968
969 // Check if we have feedback for a named access.
970 if (Name* name = nexus.FindFirstName()) {
971 return ReduceNamedAccess(node, value, receiver_maps,
972 handle(name, isolate()), access_mode,
973 language_mode, index);
974 }
975
976 // Try to lower the element access based on the {receiver_maps}.
977 return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
978 language_mode, store_mode);
979}
980
981
Ben Murdoch097c5b22016-05-18 11:27:45 +0100982Reduction JSNativeContextSpecialization::ReduceSoftDeoptimize(Node* node) {
983 Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
984 Node* effect = NodeProperties::GetEffectInput(node);
985 Node* control = NodeProperties::GetControlInput(node);
986 Node* deoptimize =
987 graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kSoft), frame_state,
988 effect, control);
989 // TODO(bmeurer): This should be on the AdvancedReducer somehow.
990 NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
991 Revisit(graph()->end());
992 node->TrimInputCount(0);
993 NodeProperties::ChangeOp(node, common()->Dead());
994 return Changed(node);
995}
996
997
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000998Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
999 DCHECK_EQ(IrOpcode::kJSLoadProperty, node->opcode());
1000 PropertyAccess const& p = PropertyAccessOf(node->op());
1001 Node* const index = NodeProperties::GetValueInput(node, 1);
1002 Node* const value = jsgraph()->Dead();
1003
1004 // Extract receiver maps from the KEYED_LOAD_IC using the KeyedLoadICNexus.
1005 if (!p.feedback().IsValid()) return NoChange();
1006 KeyedLoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
1007
1008 // Try to lower the keyed access based on the {nexus}.
1009 return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kLoad,
1010 p.language_mode(), STANDARD_STORE);
1011}
1012
1013
1014Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
1015 DCHECK_EQ(IrOpcode::kJSStoreProperty, node->opcode());
1016 PropertyAccess const& p = PropertyAccessOf(node->op());
1017 Node* const index = NodeProperties::GetValueInput(node, 1);
1018 Node* const value = NodeProperties::GetValueInput(node, 2);
1019
1020 // Extract receiver maps from the KEYED_STORE_IC using the KeyedStoreICNexus.
1021 if (!p.feedback().IsValid()) return NoChange();
1022 KeyedStoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
1023
1024 // Extract the keyed access store mode from the KEYED_STORE_IC.
1025 KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
1026
1027 // Try to lower the keyed access based on the {nexus}.
1028 return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
1029 p.language_mode(), store_mode);
1030}
1031
1032
1033void JSNativeContextSpecialization::AssumePrototypesStable(
1034 Type* receiver_type, Handle<Context> native_context,
1035 Handle<JSObject> holder) {
1036 // Determine actual holder and perform prototype chain checks.
1037 for (auto i = receiver_type->Classes(); !i.Done(); i.Advance()) {
1038 Handle<Map> map = i.Current();
1039 // Perform the implicit ToObject for primitives here.
1040 // Implemented according to ES6 section 7.3.2 GetV (V, P).
1041 Handle<JSFunction> constructor;
1042 if (Map::GetConstructorFunction(map, native_context)
1043 .ToHandle(&constructor)) {
1044 map = handle(constructor->initial_map(), isolate());
1045 }
1046 dependencies()->AssumePrototypeMapsStable(map, holder);
1047 }
1048}
1049
1050
1051void JSNativeContextSpecialization::MarkAsDeferred(Node* if_projection) {
1052 Node* branch = NodeProperties::GetControlInput(if_projection);
1053 DCHECK_EQ(IrOpcode::kBranch, branch->opcode());
1054 if (if_projection->opcode() == IrOpcode::kIfTrue) {
1055 NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kFalse));
1056 } else {
1057 DCHECK_EQ(IrOpcode::kIfFalse, if_projection->opcode());
1058 NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kTrue));
1059 }
1060}
1061
1062
1063MaybeHandle<Context> JSNativeContextSpecialization::GetNativeContext(
1064 Node* node) {
1065 Node* const context = NodeProperties::GetContextInput(node);
1066 return NodeProperties::GetSpecializationNativeContext(context,
1067 native_context());
1068}
1069
1070
// Shorthand accessor for the underlying TurboFan graph.
Graph* JSNativeContextSpecialization::graph() const {
  return jsgraph()->graph();
}
1074
1075
// Shorthand accessor for the isolate owning the graph.
Isolate* JSNativeContextSpecialization::isolate() const {
  return jsgraph()->isolate();
}
1079
1080
// Shorthand accessor for the isolate's heap object factory.
Factory* JSNativeContextSpecialization::factory() const {
  return isolate()->factory();
}
1084
1085
// Shorthand accessor for the machine-level operator builder.
MachineOperatorBuilder* JSNativeContextSpecialization::machine() const {
  return jsgraph()->machine();
}
1089
1090
// Shorthand accessor for the common operator builder.
CommonOperatorBuilder* JSNativeContextSpecialization::common() const {
  return jsgraph()->common();
}
1094
1095
// Shorthand accessor for the JavaScript-level operator builder.
JSOperatorBuilder* JSNativeContextSpecialization::javascript() const {
  return jsgraph()->javascript();
}
1099
1100
// Shorthand accessor for the simplified operator builder.
SimplifiedOperatorBuilder* JSNativeContextSpecialization::simplified() const {
  return jsgraph()->simplified();
}
1104
1105} // namespace compiler
1106} // namespace internal
1107} // namespace v8