// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/change-lowering.h"

#include "src/address-map.h"
#include "src/code-factory.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/linkage.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"

namespace v8 {
namespace internal {
namespace compiler {

ChangeLowering::~ChangeLowering() {}


Reduction ChangeLowering::Reduce(Node* node) {
  Node* control = graph()->start();
  switch (node->opcode()) {
    case IrOpcode::kChangeBitToBool:
      return ChangeBitToBool(node->InputAt(0), control);
    case IrOpcode::kChangeBoolToBit:
      return ChangeBoolToBit(node->InputAt(0));
    case IrOpcode::kChangeFloat64ToTagged:
      return ChangeFloat64ToTagged(node->InputAt(0), control);
    case IrOpcode::kChangeInt32ToTagged:
      return ChangeInt32ToTagged(node->InputAt(0), control);
    case IrOpcode::kChangeTaggedToFloat64:
      return ChangeTaggedToFloat64(node->InputAt(0), control);
    case IrOpcode::kChangeTaggedToInt32:
      return ChangeTaggedToUI32(node->InputAt(0), control, kSigned);
    case IrOpcode::kChangeTaggedToUint32:
      return ChangeTaggedToUI32(node->InputAt(0), control, kUnsigned);
    case IrOpcode::kChangeUint32ToTagged:
      return ChangeUint32ToTagged(node->InputAt(0), control);
    case IrOpcode::kLoadField:
      return LoadField(node);
    case IrOpcode::kStoreField:
      return StoreField(node);
    case IrOpcode::kLoadElement:
      return LoadElement(node);
    case IrOpcode::kStoreElement:
      return StoreElement(node);
    case IrOpcode::kAllocate:
      return Allocate(node);
    case IrOpcode::kObjectIsReceiver:
      return ObjectIsReceiver(node);
    case IrOpcode::kObjectIsSmi:
      return ObjectIsSmi(node);
    case IrOpcode::kObjectIsNumber:
      return ObjectIsNumber(node);
    case IrOpcode::kObjectIsUndetectable:
      return ObjectIsUndetectable(node);
    default:
      return NoChange();
  }
  UNREACHABLE();
  return NoChange();
}


Node* ChangeLowering::HeapNumberValueIndexConstant() {
  return jsgraph()->IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag);
}


Node* ChangeLowering::SmiMaxValueConstant() {
  return jsgraph()->Int32Constant(Smi::kMaxValue);
}


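// SmiShiftBitsConstant() returns the total shift distance used for Smi
// tagging, i.e. kSmiShiftSize + kSmiTagSize. As an illustrative sketch (the
// exact constants depend on the build configuration): with kSmiTag == 0 and
// kSmiTagSize == 1, a 32-bit target typically has kSmiShiftSize == 0, so a
// Smi is the value shifted left by 1 (5 becomes 0xA, and the low tag bit is
// 0); a 64-bit target typically has kSmiShiftSize == 31, so the payload is
// carried in the upper 32 bits of the word and the lower 32 bits are zero.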
Node* ChangeLowering::SmiShiftBitsConstant() {
  return jsgraph()->IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}


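// AllocateHeapNumberWithValue() builds the graph for "allocate a HeapNumber
// and store {value} into its value field". Rough shape of what it emits
// (a sketch, not additional lowering logic):
//
//   effect = BeginRegion(start)
//   number = Call(AllocateHeapNumberStub, target, context, effect, control)
//   store  = Store[Float64, kNoWriteBarrier](number, #value_offset, value)
//   result = FinishRegion(number, store)
//
// BeginRegion/FinishRegion mark the allocation plus initializing store as one
// atomic region, and kNoWriteBarrier is sufficient because the stored value
// is an untagged float64, not a tagged pointer.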
Node* ChangeLowering::AllocateHeapNumberWithValue(Node* value, Node* control) {
  // The AllocateHeapNumberStub does not use the context, so we can safely pass
  // in Smi zero here.
  Callable callable = CodeFactory::AllocateHeapNumber(isolate());
  Node* target = jsgraph()->HeapConstant(callable.code());
  Node* context = jsgraph()->NoContextConstant();
  Node* effect = graph()->NewNode(common()->BeginRegion(), graph()->start());
  if (!allocate_heap_number_operator_.is_set()) {
    CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
        isolate(), jsgraph()->zone(), callable.descriptor(), 0,
        CallDescriptor::kNoFlags, Operator::kNoThrow);
    allocate_heap_number_operator_.set(common()->Call(descriptor));
  }
  Node* heap_number = graph()->NewNode(allocate_heap_number_operator_.get(),
                                       target, context, effect, control);
  Node* store = graph()->NewNode(
      machine()->Store(StoreRepresentation(MachineRepresentation::kFloat64,
                                           kNoWriteBarrier)),
      heap_number, HeapNumberValueIndexConstant(), value, heap_number, control);
  return graph()->NewNode(common()->FinishRegion(), heap_number, store);
}


Node* ChangeLowering::ChangeInt32ToFloat64(Node* value) {
  return graph()->NewNode(machine()->ChangeInt32ToFloat64(), value);
}


Node* ChangeLowering::ChangeInt32ToSmi(Node* value) {
  if (machine()->Is64()) {
    value = graph()->NewNode(machine()->ChangeInt32ToInt64(), value);
  }
  return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant());
}


Node* ChangeLowering::ChangeSmiToFloat64(Node* value) {
  return ChangeInt32ToFloat64(ChangeSmiToInt32(value));
}


Node* ChangeLowering::ChangeSmiToInt32(Node* value) {
  value = graph()->NewNode(machine()->WordSar(), value, SmiShiftBitsConstant());
  if (machine()->Is64()) {
    value = graph()->NewNode(machine()->TruncateInt64ToInt32(), value);
  }
  return value;
}


Node* ChangeLowering::ChangeUint32ToFloat64(Node* value) {
  return graph()->NewNode(machine()->ChangeUint32ToFloat64(), value);
}


Node* ChangeLowering::ChangeUint32ToSmi(Node* value) {
  if (machine()->Is64()) {
    value = graph()->NewNode(machine()->ChangeUint32ToUint64(), value);
  }
  return graph()->NewNode(machine()->WordShl(), value, SmiShiftBitsConstant());
}


Node* ChangeLowering::LoadHeapNumberValue(Node* value, Node* control) {
  return graph()->NewNode(machine()->Load(MachineType::Float64()), value,
                          HeapNumberValueIndexConstant(), graph()->start(),
                          control);
}


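// TestNotSmi() computes (value & kSmiTagMask). With kSmiTag == 0 and
// kSmiTagMask == 1 (asserted below), the result is 0 for a Smi and 1 for a
// heap object pointer (which carries kHeapObjectTag in its low bit), so the
// result can be used directly as a "value is not a Smi" condition.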
Node* ChangeLowering::TestNotSmi(Node* value) {
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagMask == 1);
  return graph()->NewNode(machine()->WordAnd(), value,
                          jsgraph()->IntPtrConstant(kSmiTagMask));
}


Reduction ChangeLowering::ChangeBitToBool(Node* value, Node* control) {
  return Replace(
      graph()->NewNode(common()->Select(MachineRepresentation::kTagged), value,
                       jsgraph()->TrueConstant(), jsgraph()->FalseConstant()));
}


Reduction ChangeLowering::ChangeBoolToBit(Node* value) {
  return Replace(graph()->NewNode(machine()->WordEqual(), value,
                                  jsgraph()->TrueConstant()));
}


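// ChangeFloat64ToTagged() tries to produce a Smi and only falls back to a
// heap-allocated HeapNumber when it has to. In pseudocode (a sketch of the
// control flow built below):
//
//   value32 = TruncateFloat64ToInt32(value)
//   if value == ChangeInt32ToFloat64(value32)        // integral value
//     if value32 == 0 and the sign bit of value is set
//       goto box                                     // -0.0 needs a box
//     return Smi-tagged value32                      // overflow-checked on 32-bit
//   box:
//     return AllocateHeapNumberWithValue(value)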
Reduction ChangeLowering::ChangeFloat64ToTagged(Node* value, Node* control) {
  Type* const value_type = NodeProperties::GetType(value);
  Node* const value32 = graph()->NewNode(
      machine()->TruncateFloat64ToInt32(TruncationMode::kRoundToZero), value);
  // TODO(bmeurer): This fast case must be disabled until we kill the asm.js
  // support in the generic JavaScript pipeline, because LoadBuffer is lying
  // about its result.
  // if (value_type->Is(Type::Signed32())) {
  //   return ChangeInt32ToTagged(value32, control);
  // }
  Node* check_same = graph()->NewNode(
      machine()->Float64Equal(), value,
      graph()->NewNode(machine()->ChangeInt32ToFloat64(), value32));
  Node* branch_same = graph()->NewNode(common()->Branch(), check_same, control);

  Node* if_smi = graph()->NewNode(common()->IfTrue(), branch_same);
  Node* vsmi;
  Node* if_box = graph()->NewNode(common()->IfFalse(), branch_same);
  Node* vbox;

  // We only need to check for -0 if the {value} can potentially contain -0.
  if (value_type->Maybe(Type::MinusZero())) {
    Node* check_zero = graph()->NewNode(machine()->Word32Equal(), value32,
                                        jsgraph()->Int32Constant(0));
    Node* branch_zero = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                         check_zero, if_smi);

    Node* if_zero = graph()->NewNode(common()->IfTrue(), branch_zero);
    Node* if_notzero = graph()->NewNode(common()->IfFalse(), branch_zero);

    // In case of 0, we need to check the high bits for the IEEE -0 pattern.
    Node* check_negative = graph()->NewNode(
        machine()->Int32LessThan(),
        graph()->NewNode(machine()->Float64ExtractHighWord32(), value),
        jsgraph()->Int32Constant(0));
    Node* branch_negative = graph()->NewNode(
        common()->Branch(BranchHint::kFalse), check_negative, if_zero);

    Node* if_negative = graph()->NewNode(common()->IfTrue(), branch_negative);
    Node* if_notnegative =
        graph()->NewNode(common()->IfFalse(), branch_negative);

    // We need to create a box for negative 0.
    if_smi = graph()->NewNode(common()->Merge(2), if_notzero, if_notnegative);
    if_box = graph()->NewNode(common()->Merge(2), if_box, if_negative);
  }

  // On 64-bit machines we can just wrap the 32-bit integer in a smi; on 32-bit
  // machines we need to deal with potential overflow and fall back to boxing.
  if (machine()->Is64() || value_type->Is(Type::SignedSmall())) {
    vsmi = ChangeInt32ToSmi(value32);
  } else {
    Node* smi_tag =
        graph()->NewNode(machine()->Int32AddWithOverflow(), value32, value32);

    Node* check_ovf = graph()->NewNode(common()->Projection(1), smi_tag);
    Node* branch_ovf = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                        check_ovf, if_smi);

    Node* if_ovf = graph()->NewNode(common()->IfTrue(), branch_ovf);
    if_box = graph()->NewNode(common()->Merge(2), if_ovf, if_box);

    if_smi = graph()->NewNode(common()->IfFalse(), branch_ovf);
    vsmi = graph()->NewNode(common()->Projection(0), smi_tag);
  }

  // Allocate the box for the {value}.
  vbox = AllocateHeapNumberWithValue(value, if_box);

  control = graph()->NewNode(common()->Merge(2), if_smi, if_box);
  value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
                           vsmi, vbox, control);
  return Replace(value);
}


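// ChangeInt32ToTagged() relies on the 32-bit Smi encoding being value << 1,
// so Int32AddWithOverflow(value, value) both tags the value and detects the
// out-of-range case in one operation. Illustrative example (assuming the
// usual 31-bit Smi payload on 32-bit targets): 5 tags to 10 (0b1010, tag bit
// clear), while 0x40000000 overflows the signed 32-bit add and takes the
// HeapNumber path. On 64-bit targets, or when the type is already
// SignedSmall, tagging cannot overflow and a plain shift suffices.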
Reduction ChangeLowering::ChangeInt32ToTagged(Node* value, Node* control) {
  if (machine()->Is64() ||
      NodeProperties::GetType(value)->Is(Type::SignedSmall())) {
    return Replace(ChangeInt32ToSmi(value));
  }

  Node* add = graph()->NewNode(machine()->Int32AddWithOverflow(), value, value);

  Node* ovf = graph()->NewNode(common()->Projection(1), add);
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kFalse), ovf, control);

  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* vtrue =
      AllocateHeapNumberWithValue(ChangeInt32ToFloat64(value), if_true);

  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* vfalse = graph()->NewNode(common()->Projection(0), add);

  Node* merge = graph()->NewNode(common()->Merge(2), if_true, if_false);
  Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
                               vtrue, vfalse, merge);

  return Replace(phi);
}


Reduction ChangeLowering::ChangeTaggedToUI32(Node* value, Node* control,
                                             Signedness signedness) {
  if (NodeProperties::GetType(value)->Is(Type::TaggedSigned())) {
    return Replace(ChangeSmiToInt32(value));
  }

  const Operator* op = (signedness == kSigned)
                           ? machine()->ChangeFloat64ToInt32()
                           : machine()->ChangeFloat64ToUint32();

  if (NodeProperties::GetType(value)->Is(Type::TaggedPointer())) {
    return Replace(graph()->NewNode(op, LoadHeapNumberValue(value, control)));
  }

  Node* check = TestNotSmi(value);
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kFalse), check, control);

  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* vtrue = graph()->NewNode(op, LoadHeapNumberValue(value, if_true));

  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* vfalse = ChangeSmiToInt32(value);

  Node* merge = graph()->NewNode(common()->Merge(2), if_true, if_false);
  Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
                               vtrue, vfalse, merge);

  return Replace(phi);
}


namespace {

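// CanCover() returns true when {value} has the expected opcode and has at
// most one value use (effect and control uses are ignored), which in practice
// is just the node currently being reduced. Only then is it safe for the
// reducer below to absorb {value} into the conversion it builds.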
bool CanCover(Node* value, IrOpcode::Value opcode) {
  if (value->opcode() != opcode) return false;
  bool first = true;
  for (Edge const edge : value->use_edges()) {
    if (NodeProperties::IsControlEdge(edge)) continue;
    if (NodeProperties::IsEffectEdge(edge)) continue;
    DCHECK(NodeProperties::IsValueEdge(edge));
    if (!first) return false;
    first = false;
  }
  return true;
}

}  // namespace


Reduction ChangeLowering::ChangeTaggedToFloat64(Node* value, Node* control) {
  if (CanCover(value, IrOpcode::kJSToNumber)) {
    // ChangeTaggedToFloat64(JSToNumber(x)) =>
    //   if IsSmi(x) then ChangeSmiToFloat64(x)
    //   else let y = JSToNumber(x) in
    //     if IsSmi(y) then ChangeSmiToFloat64(y)
    //     else LoadHeapNumberValue(y)
    Node* const object = NodeProperties::GetValueInput(value, 0);
    Node* const context = NodeProperties::GetContextInput(value);
    Node* const frame_state = NodeProperties::GetFrameStateInput(value, 0);
    Node* const effect = NodeProperties::GetEffectInput(value);
    Node* const control = NodeProperties::GetControlInput(value);

    const Operator* merge_op = common()->Merge(2);
    const Operator* ephi_op = common()->EffectPhi(2);
    const Operator* phi_op = common()->Phi(MachineRepresentation::kFloat64, 2);

    Node* check1 = TestNotSmi(object);
    Node* branch1 =
        graph()->NewNode(common()->Branch(BranchHint::kFalse), check1, control);

    Node* if_true1 = graph()->NewNode(common()->IfTrue(), branch1);
    Node* vtrue1 = graph()->NewNode(value->op(), object, context, frame_state,
                                    effect, if_true1);
    Node* etrue1 = vtrue1;

    Node* check2 = TestNotSmi(vtrue1);
    Node* branch2 = graph()->NewNode(common()->Branch(), check2, if_true1);

    Node* if_true2 = graph()->NewNode(common()->IfTrue(), branch2);
    Node* vtrue2 = LoadHeapNumberValue(vtrue1, if_true2);

    Node* if_false2 = graph()->NewNode(common()->IfFalse(), branch2);
    Node* vfalse2 = ChangeSmiToFloat64(vtrue1);

    if_true1 = graph()->NewNode(merge_op, if_true2, if_false2);
    vtrue1 = graph()->NewNode(phi_op, vtrue2, vfalse2, if_true1);

    Node* if_false1 = graph()->NewNode(common()->IfFalse(), branch1);
    Node* vfalse1 = ChangeSmiToFloat64(object);
    Node* efalse1 = effect;

    Node* merge1 = graph()->NewNode(merge_op, if_true1, if_false1);
    Node* ephi1 = graph()->NewNode(ephi_op, etrue1, efalse1, merge1);
    Node* phi1 = graph()->NewNode(phi_op, vtrue1, vfalse1, merge1);

    // Wire the new diamond into the graph; {JSToNumber} can still throw.
    NodeProperties::ReplaceUses(value, phi1, ephi1, etrue1, etrue1);

    // TODO(mstarzinger): This iteration cuts out the IfSuccess projection from
    // the node and places it inside the diamond. Come up with a helper method!
    for (Node* use : etrue1->uses()) {
      if (use->opcode() == IrOpcode::kIfSuccess) {
        use->ReplaceUses(merge1);
        NodeProperties::ReplaceControlInput(branch2, use);
      }
    }

    return Replace(phi1);
  }

  Node* check = TestNotSmi(value);
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kFalse), check, control);

  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* vtrue = LoadHeapNumberValue(value, if_true);

  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* vfalse = ChangeSmiToFloat64(value);

  Node* merge = graph()->NewNode(common()->Merge(2), if_true, if_false);
  Node* phi = graph()->NewNode(
      common()->Phi(MachineRepresentation::kFloat64, 2), vtrue, vfalse, merge);

  return Replace(phi);
}


Reduction ChangeLowering::ChangeUint32ToTagged(Node* value, Node* control) {
  if (NodeProperties::GetType(value)->Is(Type::UnsignedSmall())) {
    return Replace(ChangeUint32ToSmi(value));
  }

  Node* check = graph()->NewNode(machine()->Uint32LessThanOrEqual(), value,
                                 SmiMaxValueConstant());
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);

  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* vtrue = ChangeUint32ToSmi(value);

  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* vfalse =
      AllocateHeapNumberWithValue(ChangeUint32ToFloat64(value), if_false);

  Node* merge = graph()->NewNode(common()->Merge(2), if_true, if_false);
  Node* phi = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
                               vtrue, vfalse, merge);

  return Replace(phi);
}


namespace {

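// ComputeWriteBarrierKind() picks the cheapest write barrier that is still
// safe for a store, based on what is statically known about the field and the
// stored value:
//   - kNoWriteBarrier       for Smis, oddballs (true/false/null/undefined),
//                           immortal immovable roots, and untagged bases,
//   - kMapWriteBarrier      for map stores,
//   - kPointerWriteBarrier  when the value is known to be a heap pointer
//                           (no Smi check needed at runtime),
//   - kFullWriteBarrier     otherwise.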
WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged,
                                         MachineRepresentation representation,
                                         Type* field_type, Type* input_type) {
  if (field_type->Is(Type::TaggedSigned()) ||
      input_type->Is(Type::TaggedSigned())) {
    // Write barriers are only for writes of heap objects.
    return kNoWriteBarrier;
  }
  if (input_type->Is(Type::BooleanOrNullOrUndefined())) {
    // Write barriers are not necessary when storing true, false, null or
    // undefined, because these special oddballs are always in the root set.
    return kNoWriteBarrier;
  }
  if (base_is_tagged == kTaggedBase &&
      representation == MachineRepresentation::kTagged) {
    if (input_type->IsConstant() &&
        input_type->AsConstant()->Value()->IsHeapObject()) {
      Handle<HeapObject> input =
          Handle<HeapObject>::cast(input_type->AsConstant()->Value());
      if (input->IsMap()) {
        // Write barriers for storing maps are cheaper.
        return kMapWriteBarrier;
      }
      Isolate* const isolate = input->GetIsolate();
      RootIndexMap root_index_map(isolate);
      int root_index = root_index_map.Lookup(*input);
      if (root_index != RootIndexMap::kInvalidRootIndex &&
          isolate->heap()->RootIsImmortalImmovable(root_index)) {
        // Write barriers are unnecessary for immortal immovable roots.
        return kNoWriteBarrier;
      }
    }
    if (field_type->Is(Type::TaggedPointer()) ||
        input_type->Is(Type::TaggedPointer())) {
      // Write barriers for heap objects don't need a Smi check.
      return kPointerWriteBarrier;
    }
    // Write barriers are only for writes into heap objects (i.e. tagged base).
    return kFullWriteBarrier;
  }
  return kNoWriteBarrier;
}


WriteBarrierKind ComputeWriteBarrierKind(BaseTaggedness base_is_tagged,
                                         MachineRepresentation representation,
                                         int field_offset, Type* field_type,
                                         Type* input_type) {
  if (base_is_tagged == kTaggedBase && field_offset == HeapObject::kMapOffset) {
    // Write barriers for storing maps are cheaper.
    return kMapWriteBarrier;
  }
  return ComputeWriteBarrierKind(base_is_tagged, representation, field_type,
                                 input_type);
}

}  // namespace


Reduction ChangeLowering::LoadField(Node* node) {
  const FieldAccess& access = FieldAccessOf(node->op());
  Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag());
  node->InsertInput(graph()->zone(), 1, offset);
  NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
  return Changed(node);
}


Reduction ChangeLowering::StoreField(Node* node) {
  const FieldAccess& access = FieldAccessOf(node->op());
  Type* type = NodeProperties::GetType(node->InputAt(1));
  WriteBarrierKind kind = ComputeWriteBarrierKind(
      access.base_is_tagged, access.machine_type.representation(),
      access.offset, access.type, type);
  Node* offset = jsgraph()->IntPtrConstant(access.offset - access.tag());
  node->InsertInput(graph()->zone(), 1, offset);
  NodeProperties::ChangeOp(node,
                           machine()->Store(StoreRepresentation(
                               access.machine_type.representation(), kind)));
  return Changed(node);
}


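// ComputeIndex() turns an element index into a byte offset relative to the
// (possibly tagged) base pointer:
//
//   offset = (key << log2(element size)) + header_size - tag
//
// For example, with 4-byte tagged elements, an 8-byte header and a tagged
// base (tag 1), key 3 yields 3 * 4 + 8 - 1 = 19. (The concrete sizes depend
// on the target and the access descriptor; this is just an illustration.)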
Node* ChangeLowering::ComputeIndex(const ElementAccess& access,
                                   Node* const key) {
  Node* index = key;
  const int element_size_shift =
      ElementSizeLog2Of(access.machine_type.representation());
  if (element_size_shift) {
    index = graph()->NewNode(machine()->Word32Shl(), index,
                             jsgraph()->Int32Constant(element_size_shift));
  }
  const int fixed_offset = access.header_size - access.tag();
  if (fixed_offset) {
    index = graph()->NewNode(machine()->Int32Add(), index,
                             jsgraph()->Int32Constant(fixed_offset));
  }
  if (machine()->Is64()) {
    // TODO(turbofan): This is probably only correct for typed arrays, and only
    // if the typed arrays are at most 2GiB in size, which happens to match
    // exactly our current situation.
    index = graph()->NewNode(machine()->ChangeUint32ToUint64(), index);
  }
  return index;
}


Reduction ChangeLowering::LoadElement(Node* node) {
  const ElementAccess& access = ElementAccessOf(node->op());
  node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1)));
  NodeProperties::ChangeOp(node, machine()->Load(access.machine_type));
  return Changed(node);
}


Reduction ChangeLowering::StoreElement(Node* node) {
  const ElementAccess& access = ElementAccessOf(node->op());
  Type* type = NodeProperties::GetType(node->InputAt(2));
  node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1)));
  NodeProperties::ChangeOp(
      node, machine()->Store(StoreRepresentation(
                access.machine_type.representation(),
                ComputeWriteBarrierKind(access.base_is_tagged,
                                        access.machine_type.representation(),
                                        access.type, type))));
  return Changed(node);
}


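// Allocate() lowers an Allocate node to a call. For NOT_TENURED allocations
// it becomes a call to the AllocateInNewSpace stub; for TENURED allocations
// it becomes a call to Runtime::kAllocateInTargetSpace through the CEntry
// stub, with the target space (OLD_SPACE) encoded as a Smi flag argument.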
Reduction ChangeLowering::Allocate(Node* node) {
  PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op());
  if (pretenure == NOT_TENURED) {
    Callable callable = CodeFactory::AllocateInNewSpace(isolate());
    Node* target = jsgraph()->HeapConstant(callable.code());
    CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
        isolate(), jsgraph()->zone(), callable.descriptor(), 0,
        CallDescriptor::kNoFlags, Operator::kNoThrow);
    const Operator* op = common()->Call(descriptor);
    node->InsertInput(graph()->zone(), 0, target);
    node->InsertInput(graph()->zone(), 2, jsgraph()->NoContextConstant());
    NodeProperties::ChangeOp(node, op);
  } else {
    DCHECK_EQ(TENURED, pretenure);
    AllocationSpace space = OLD_SPACE;
    Runtime::FunctionId f = Runtime::kAllocateInTargetSpace;
    Operator::Properties props = node->op()->properties();
    CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
        jsgraph()->zone(), f, 2, props, CallDescriptor::kNeedsFrameState);
    ExternalReference ref(f, jsgraph()->isolate());
    int32_t flags = AllocateTargetSpace::encode(space);
    node->InsertInput(graph()->zone(), 0, jsgraph()->CEntryStubConstant(1));
    node->InsertInput(graph()->zone(), 2, jsgraph()->SmiConstant(flags));
    node->InsertInput(graph()->zone(), 3, jsgraph()->ExternalConstant(ref));
    node->InsertInput(graph()->zone(), 4, jsgraph()->Int32Constant(2));
    node->InsertInput(graph()->zone(), 5, jsgraph()->NoContextConstant());
    NodeProperties::ChangeOp(node, common()->Call(desc));
  }
  return Changed(node);
}

Node* ChangeLowering::IsSmi(Node* value) {
  return graph()->NewNode(
      machine()->WordEqual(),
      graph()->NewNode(machine()->WordAnd(), value,
                       jsgraph()->IntPtrConstant(kSmiTagMask)),
      jsgraph()->IntPtrConstant(kSmiTag));
}

Node* ChangeLowering::LoadHeapObjectMap(Node* object, Node* control) {
  return graph()->NewNode(
      machine()->Load(MachineType::AnyTagged()), object,
      jsgraph()->IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag),
      graph()->start(), control);
}

Node* ChangeLowering::LoadMapBitField(Node* map) {
  return graph()->NewNode(
      machine()->Load(MachineType::Uint8()), map,
      jsgraph()->IntPtrConstant(Map::kBitFieldOffset - kHeapObjectTag),
      graph()->start(), graph()->start());
}

Node* ChangeLowering::LoadMapInstanceType(Node* map) {
  return graph()->NewNode(
      machine()->Load(MachineType::Uint8()), map,
      jsgraph()->IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag),
      graph()->start(), graph()->start());
}

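// The ObjectIs* reductions below all follow the same pattern: branch on
// IsSmi(input), compute the answer for the Smi case and the heap-object case
// separately (the latter by inspecting the input's map), and then turn the
// original node itself into a Phi of kBit representation that merges the two
// results.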
Reduction ChangeLowering::ObjectIsNumber(Node* node) {
  Node* input = NodeProperties::GetValueInput(node, 0);
  // TODO(bmeurer): Optimize somewhat based on input type.
  Node* check = IsSmi(input);
  Node* branch = graph()->NewNode(common()->Branch(), check, graph()->start());
  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* vtrue = jsgraph()->Int32Constant(1);
  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* vfalse = graph()->NewNode(
      machine()->WordEqual(), LoadHeapObjectMap(input, if_false),
      jsgraph()->HeapConstant(isolate()->factory()->heap_number_map()));
  Node* control = graph()->NewNode(common()->Merge(2), if_true, if_false);
  node->ReplaceInput(0, vtrue);
  node->AppendInput(graph()->zone(), vfalse);
  node->AppendInput(graph()->zone(), control);
  NodeProperties::ChangeOp(node, common()->Phi(MachineRepresentation::kBit, 2));
  return Changed(node);
}

Reduction ChangeLowering::ObjectIsReceiver(Node* node) {
  Node* input = NodeProperties::GetValueInput(node, 0);
  // TODO(bmeurer): Optimize somewhat based on input type.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Node* check = IsSmi(input);
  Node* branch = graph()->NewNode(common()->Branch(), check, graph()->start());
  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* vtrue = jsgraph()->Int32Constant(0);
  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* vfalse =
      graph()->NewNode(machine()->Uint32LessThanOrEqual(),
                       jsgraph()->Uint32Constant(FIRST_JS_RECEIVER_TYPE),
                       LoadMapInstanceType(LoadHeapObjectMap(input, if_false)));
  Node* control = graph()->NewNode(common()->Merge(2), if_true, if_false);
  node->ReplaceInput(0, vtrue);
  node->AppendInput(graph()->zone(), vfalse);
  node->AppendInput(graph()->zone(), control);
  NodeProperties::ChangeOp(node, common()->Phi(MachineRepresentation::kBit, 2));
  return Changed(node);
}

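// For the heap-object case, ObjectIsUndetectable() tests the map's
// undetectable bit and normalizes the result to 0 or 1 via a double
// Word32Equal: the inner compare maps "bit set" to 0, the outer compare flips
// it back, so the final value is 1 exactly when Map::kIsUndetectable is set.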
Reduction ChangeLowering::ObjectIsUndetectable(Node* node) {
  Node* input = NodeProperties::GetValueInput(node, 0);
  // TODO(bmeurer): Optimize somewhat based on input type.
  Node* check = IsSmi(input);
  Node* branch = graph()->NewNode(common()->Branch(), check, graph()->start());
  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* vtrue = jsgraph()->Int32Constant(0);
  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  Node* vfalse = graph()->NewNode(
      machine()->Word32Equal(),
      graph()->NewNode(
          machine()->Word32Equal(),
          graph()->NewNode(machine()->Word32And(),
                           jsgraph()->Uint32Constant(1 << Map::kIsUndetectable),
                           LoadMapBitField(LoadHeapObjectMap(input, if_false))),
          jsgraph()->Int32Constant(0)),
      jsgraph()->Int32Constant(0));
  Node* control = graph()->NewNode(common()->Merge(2), if_true, if_false);
  node->ReplaceInput(0, vtrue);
  node->AppendInput(graph()->zone(), vfalse);
  node->AppendInput(graph()->zone(), control);
  NodeProperties::ChangeOp(node, common()->Phi(MachineRepresentation::kBit, 2));
  return Changed(node);
}

Reduction ChangeLowering::ObjectIsSmi(Node* node) {
  node->ReplaceInput(0,
                     graph()->NewNode(machine()->WordAnd(), node->InputAt(0),
                                      jsgraph()->IntPtrConstant(kSmiTagMask)));
  node->AppendInput(graph()->zone(), jsgraph()->IntPtrConstant(kSmiTag));
  NodeProperties::ChangeOp(node, machine()->WordEqual());
  return Changed(node);
}

Isolate* ChangeLowering::isolate() const { return jsgraph()->isolate(); }


Graph* ChangeLowering::graph() const { return jsgraph()->graph(); }


CommonOperatorBuilder* ChangeLowering::common() const {
  return jsgraph()->common();
}


MachineOperatorBuilder* ChangeLowering::machine() const {
  return jsgraph()->machine();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8