blob: 9784091fbbe827738e785b86fef3f448031b6691 [file] [log] [blame]
Michael Kruse138a3fb2017-08-04 22:51:23 +00001//===------ ZoneAlgo.cpp ----------------------------------------*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// Derive information about array elements between statements ("Zones").
11//
12// The algorithms here work on the scatter space - the image space of the
13// schedule returned by Scop::getSchedule(). We call an element in that space a
14// "timepoint". Timepoints are lexicographically ordered such that we can
// define ranges in the scatter space. We use two flavors of such ranges:
16// Timepoint sets and zones. A timepoint set is simply a subset of the scatter
17// space and is directly stored as isl_set.
18//
19// Zones are used to describe the space between timepoints as open sets, i.e.
20// they do not contain the extrema. Using isl rational sets to express these
21// would be overkill. We also cannot store them as the integer timepoints they
22// contain; the (nonempty) zone between 1 and 2 would be empty and
23// indistinguishable from e.g. the zone between 3 and 4. Also, we cannot store
24// the integer set including the extrema; the set ]1,2[ + ]3,4[ could be
// coalesced to ]1,4[, although we defined the range [2,3] to be not in the set.
26// Instead, we store the "half-open" integer extrema, including the lower bound,
27// but excluding the upper bound. Examples:
28//
29// * The set { [i] : 1 <= i <= 3 } represents the zone ]0,3[ (which contains the
30// integer points 1 and 2, but not 0 or 3)
31//
32// * { [1] } represents the zone ]0,1[
33//
34// * { [i] : i = 1 or i = 3 } represents the zone ]0,1[ + ]2,3[
35//
36// Therefore, an integer i in the set represents the zone ]i-1,i[, i.e. strictly
// speaking the integer points never belong to the zone. However, depending on
38// the interpretation, one might want to include them. Part of the
39// interpretation may not be known when the zone is constructed.
40//
41// Reads are assumed to always take place before writes, hence we can think of
42// reads taking place at the beginning of a timepoint and writes at the end.
43//
44// Let's assume that the zone represents the lifetime of a variable. That is,
45// the zone begins with a write that defines the value during its lifetime and
46// ends with the last read of that value. In the following we consider whether a
47// read/write at the beginning/ending of the lifetime zone should be within the
48// zone or outside of it.
49//
50// * A read at the timepoint that starts the live-range loads the previous
51// value. Hence, exclude the timepoint starting the zone.
52//
53// * A write at the timepoint that starts the live-range is not defined whether
54// it occurs before or after the write that starts the lifetime. We do not
55// allow this situation to occur. Hence, we include the timepoint starting the
56// zone to determine whether they are conflicting.
57//
58// * A read at the timepoint that ends the live-range reads the same variable.
59// We include the timepoint at the end of the zone to include that read into
60// the live-range. Doing otherwise would mean that the two reads access
61// different values, which would mean that the value they read are both alive
62// at the same time but occupy the same variable.
63//
64// * A write at the timepoint that ends the live-range starts a new live-range.
65// It must not be included in the live-range of the previous definition.
66//
67// All combinations of reads and writes at the endpoints are possible, but most
68// of the time only the write->read (for instance, a live-range from definition
69// to last use) and read->write (for instance, an unused range from last use to
70// overwrite) and combinations are interesting (half-open ranges). write->write
71// zones might be useful as well in some context to represent
72// output-dependencies.
73//
74// @see convertZoneToTimepoints
75//
76//
// The code makes use of maps and sets in many different spaces. To not lose
78// track in which space a set or map is expected to be in, variables holding an
79// isl reference are usually annotated in the comments. They roughly follow isl
80// syntax for spaces, but only the tuples, not the dimensions. The tuples have a
81// meaning as follows:
82//
83// * Space[] - An unspecified tuple. Used for function parameters such that the
84// function caller can use it for anything they like.
85//
86// * Domain[] - A statement instance as returned by ScopStmt::getDomain()
87// isl_id_get_name: Stmt_<NameOfBasicBlock>
88// isl_id_get_user: Pointer to ScopStmt
89//
90// * Element[] - An array element as in the range part of
91// MemoryAccess::getAccessRelation()
92// isl_id_get_name: MemRef_<NameOfArrayVariable>
93// isl_id_get_user: Pointer to ScopArrayInfo
94//
95// * Scatter[] - Scatter space or space of timepoints
96// Has no tuple id
97//
98// * Zone[] - Range between timepoints as described above
99// Has no tuple id
100//
101// * ValInst[] - An llvm::Value as defined at a specific timepoint.
102//
103// A ValInst[] itself can be structured as one of:
104//
105// * [] - An unknown value.
106// Always zero dimensions
107// Has no tuple id
108//
109// * Value[] - An llvm::Value that is read-only in the SCoP, i.e. its
110// runtime content does not depend on the timepoint.
111// Always zero dimensions
112// isl_id_get_name: Val_<NameOfValue>
113// isl_id_get_user: A pointer to an llvm::Value
114//
115// * SCEV[...] - A synthesizable llvm::SCEV Expression.
// In contrast to a Value[] it has at least one dimension per
117// SCEVAddRecExpr in the SCEV.
118//
119// * [Domain[] -> Value[]] - An llvm::Value that may change during the
120// Scop's execution.
121// The tuple itself has no id, but it wraps a map space holding a
122// statement instance which defines the llvm::Value as the map's domain
123// and llvm::Value itself as range.
124//
125// @see makeValInst()
126//
127// An annotation "{ Domain[] -> Scatter[] }" therefore means: A map from a
128// statement instance to a timepoint, aka a schedule. There is only one scatter
129// space, but most of the time multiple statements are processed in one set.
130// This is why most of the time isl_union_map has to be used.
131//
132// The basic algorithm works as follows:
133// At first we verify that the SCoP is compatible with this technique. For
134// instance, two writes cannot write to the same location at the same statement
135// instance because we cannot determine within the polyhedral model which one
136// comes first. Once this was verified, we compute zones at which an array
137// element is unused. This computation can fail if it takes too long. Then the
138// main algorithm is executed. Because every store potentially trails an unused
139// zone, we start at stores. We search for a scalar (MemoryKind::Value or
140// MemoryKind::PHI) that we can map to the array element overwritten by the
141// store, preferably one that is used by the store or at least the ScopStmt.
142// When it does not conflict with the lifetime of the values in the array
143// element, the map is applied and the unused zone updated as it is now used. We
144// continue to try to map scalars to the array element until there are no more
145// candidates to map. The algorithm is greedy in the sense that the first scalar
146// not conflicting will be mapped. Other scalars processed later that could have
147// fit the same unused zone will be rejected. As such the result depends on the
148// processing order.
149//
150//===----------------------------------------------------------------------===//
151
152#include "polly/ZoneAlgo.h"
153#include "polly/ScopInfo.h"
154#include "polly/Support/GICHelper.h"
155#include "polly/Support/ISLTools.h"
156#include "polly/Support/VirtualInstruction.h"
Michael Kruse47281842017-08-28 20:39:07 +0000157#include "llvm/ADT/Statistic.h"
Zhaoshi Zhengceec1752017-11-17 22:05:19 +0000158#include "llvm/Support/raw_ostream.h"
Michael Kruse138a3fb2017-08-04 22:51:23 +0000159
160#define DEBUG_TYPE "polly-zone"
161
Michael Kruse47281842017-08-28 20:39:07 +0000162STATISTIC(NumIncompatibleArrays, "Number of not zone-analyzable arrays");
163STATISTIC(NumCompatibleArrays, "Number of zone-analyzable arrays");
Michael Kruse68821a82017-10-31 16:11:46 +0000164STATISTIC(NumRecursivePHIs, "Number of recursive PHIs");
165STATISTIC(NumNormalizablePHIs, "Number of normalizable PHIs");
166STATISTIC(NumPHINormialization, "Number of PHI executed normalizations");
Michael Kruse47281842017-08-28 20:39:07 +0000167
Michael Kruse138a3fb2017-08-04 22:51:23 +0000168using namespace polly;
169using namespace llvm;
170
171static isl::union_map computeReachingDefinition(isl::union_map Schedule,
172 isl::union_map Writes,
173 bool InclDef, bool InclRedef) {
174 return computeReachingWrite(Schedule, Writes, false, InclDef, InclRedef);
175}
176
/// Compute the reaching definition of a scalar.
///
/// Compared to computeReachingDefinition, there is just one element which is
/// accessed and therefore only a set of instances that access that element is
/// required.
///
/// @param Schedule  { DomainWrite[] -> Scatter[] }
/// @param Writes    { DomainWrite[] }
/// @param InclDef   Include the timepoint of the definition to the result.
/// @param InclRedef Include the timepoint of the overwrite into the result.
///
/// @return { Scatter[] -> DomainWrite[] }
static isl::union_map computeScalarReachingDefinition(isl::union_map Schedule,
                                                      isl::union_set Writes,
                                                      bool InclDef,
                                                      bool InclRedef) {
  // { DomainWrite[] -> Element[] }
  // The scalar acts as a single zero-dimensional "element": from_domain gives
  // each write instance an anonymous, zero-dimensional range tuple.
  isl::union_map Defs = isl::union_map::from_domain(Writes);

  // { [Element[] -> Scatter[]] -> DomainWrite[] }
  auto ReachDefs =
      computeReachingDefinition(Schedule, Defs, InclDef, InclRedef);

  // { Scatter[] -> DomainWrite[] }
  // Drop the (trivial) element tuple and keep the timepoint-to-writer map.
  return ReachDefs.curry().range().unwrap();
}
203
204/// Compute the reaching definition of a scalar.
205///
206/// This overload accepts only a single writing statement as an isl_map,
207/// consequently the result also is only a single isl_map.
208///
209/// @param Schedule { DomainWrite[] -> Scatter[] }
210/// @param Writes { DomainWrite[] }
211/// @param InclDef Include the timepoint of the definition to the result.
212/// @param InclRedef Include the timepoint of the overwrite into the result.
213///
214/// @return { Scatter[] -> DomainWrite[] }
215static isl::map computeScalarReachingDefinition(isl::union_map Schedule,
216 isl::set Writes, bool InclDef,
217 bool InclRedef) {
Tobias Grosser0dd42512017-08-21 14:19:40 +0000218 isl::space DomainSpace = Writes.get_space();
219 isl::space ScatterSpace = getScatterSpace(Schedule);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000220
221 // { Scatter[] -> DomainWrite[] }
Tobias Grosser0dd42512017-08-21 14:19:40 +0000222 isl::union_map UMap = computeScalarReachingDefinition(
223 Schedule, isl::union_set(Writes), InclDef, InclRedef);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000224
Tobias Grosser0dd42512017-08-21 14:19:40 +0000225 isl::space ResultSpace = ScatterSpace.map_from_domain_and_range(DomainSpace);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000226 return singleton(UMap, ResultSpace);
227}
228
229isl::union_map polly::makeUnknownForDomain(isl::union_set Domain) {
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000230 return isl::union_map::from_domain(Domain);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000231}
232
233/// Create a domain-to-unknown value mapping.
234///
235/// @see makeUnknownForDomain(isl::union_set)
236///
237/// @param Domain { Domain[] }
238///
239/// @return { Domain[] -> ValInst[] }
240static isl::map makeUnknownForDomain(isl::set Domain) {
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000241 return isl::map::from_domain(Domain);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000242}
243
Michael Kruse70af4f52017-08-07 18:40:29 +0000244/// Return whether @p Map maps to an unknown value.
245///
246/// @param { [] -> ValInst[] }
247static bool isMapToUnknown(const isl::map &Map) {
248 isl::space Space = Map.get_space().range();
249 return Space.has_tuple_id(isl::dim::set).is_false() &&
250 Space.is_wrapping().is_false() && Space.dim(isl::dim::set) == 0;
251}
252
/// Remove all maps whose range is an unknown ValInst[] from @p UMap.
///
/// @param UMap { [] -> ValInst[] }
///
/// @return The subset of @p UMap mapping to known values, or a null
///         union_map if iterating over @p UMap did not complete.
isl::union_map polly::filterKnownValInst(const isl::union_map &UMap) {
  isl::union_map Result = isl::union_map::empty(UMap.get_space());
  // Copy only the per-space maps that do not map to the unknown ValInst[].
  isl::stat Success = UMap.foreach_map([=, &Result](isl::map Map) -> isl::stat {
    if (!isMapToUnknown(Map))
      Result = Result.add_map(Map);
    return isl::stat::ok;
  });
  // Propagate iteration failure (e.g. on isl error) as a null result.
  if (Success != isl::stat::ok)
    return {};
  return Result;
}
264
ZoneAlgorithm::ZoneAlgorithm(const char *PassName, Scop *S, LoopInfo *LI)
    : PassName(PassName), IslCtx(S->getSharedIslCtx()), S(S), LI(LI),
      Schedule(S->getSchedule()) {
  auto Domains = S->getDomains();

  // Restrict the schedule to the statement instances that actually execute.
  Schedule = Schedule.intersect_domain(Domains);
  // Cache the parameter space and the scatter space; they are used to build
  // empty sets/maps and schedule-related spaces throughout the analysis.
  ParamSpace = Schedule.get_space();
  ScatterSpace = getScatterSpace(Schedule);
}
274
/// Check if all stores in @p Stmt store the very same value.
///
/// This covers a special situation occurring in Polybench's
/// covariance/correlation (which is typical for algorithms that cover symmetric
/// matrices):
///
/// for (int i = 0; i < n; i += 1)
///   for (int j = 0; j <= i; j += 1) {
///     double x = ...;
///     C[i][j] = x;
///     C[j][i] = x;
///   }
///
/// For i == j, the same value is written twice to the same element. Double
/// writes to the same element are not allowed in DeLICM because its algorithm
/// does not see which of the writes is effective. But if it's the same value
/// anyway, it doesn't matter.
///
/// LLVM passes, however, cannot simplify this because the write is necessary
/// for i != j (unless it would add a condition for one of the writes to occur
/// only if i != j).
///
/// TODO: In the future we may want to extend this to make the checks
///       specific to different memory locations.
static bool onlySameValueWrites(ScopStmt *Stmt) {
  Value *V = nullptr;

  for (auto *MA : *Stmt) {
    // Only consider unconditional array stores.
    if (!MA->isLatestArrayKind() || !MA->isMustWrite() ||
        !MA->isOriginalArrayKind())
      continue;

    // First qualifying store: remember its value as the reference.
    if (!V) {
      V = MA->getAccessValue();
      continue;
    }

    // Any store of a different value disqualifies the statement.
    if (V != MA->getAccessValue())
      return false;
  }
  return true;
}
317
/// Inspect all array accesses of @p Stmt and record which arrays cannot be
/// zone-analyzed.
///
/// Arrays are added wholesale (never individual elements) to avoid solving
/// ILP problems. Incompatibilities are also reported as optimization remarks.
///
/// @param Stmt             Statement whose accesses are checked.
/// @param IncompatibleElts Collects entire arrays with incompatible accesses.
/// @param AllElts          Collects all arrays touched by @p Stmt.
void ZoneAlgorithm::collectIncompatibleElts(ScopStmt *Stmt,
                                            isl::union_set &IncompatibleElts,
                                            isl::union_set &AllElts) {
  auto Stores = makeEmptyUnionMap();
  auto Loads = makeEmptyUnionMap();

  // This assumes that the MemoryKind::Array MemoryAccesses are iterated in
  // order.
  for (auto *MA : *Stmt) {
    if (!MA->isOriginalArrayKind())
      continue;

    isl::map AccRelMap = getAccessRelationFor(MA);
    isl::union_map AccRel = AccRelMap;

    // To avoid solving any ILP problems, always add entire arrays instead of
    // just the elements that are accessed.
    auto ArrayElts = isl::set::universe(AccRelMap.get_space().range());
    AllElts = AllElts.add_set(ArrayElts);

    if (MA->isRead()) {
      // Reject load after store to same location.
      if (!isl_union_map_is_disjoint(Stores.keep(), AccRel.keep())) {
        DEBUG(dbgs() << "Load after store of same element in same statement\n");
        OptimizationRemarkMissed R(PassName, "LoadAfterStore",
                                   MA->getAccessInstruction());
        R << "load after store of same element in same statement";
        R << " (previous stores: " << Stores;
        R << ", loading: " << AccRel << ")";
        S->getFunction().getContext().diagnose(R);

        IncompatibleElts = IncompatibleElts.add_set(ArrayElts);
      }

      Loads = Loads.unite(AccRel);

      continue;
    }

    // In region statements the order is less clear, e.g. the load and store
    // might be in a boxed loop.
    if (Stmt->isRegionStmt() &&
        !isl_union_map_is_disjoint(Loads.keep(), AccRel.keep())) {
      DEBUG(dbgs() << "WRITE in non-affine subregion not supported\n");
      OptimizationRemarkMissed R(PassName, "StoreInSubregion",
                                 MA->getAccessInstruction());
      R << "store is in a non-affine subregion";
      S->getFunction().getContext().diagnose(R);

      IncompatibleElts = IncompatibleElts.add_set(ArrayElts);
    }

    // Do not allow more than one store to the same location.
    if (!isl_union_map_is_disjoint(Stores.keep(), AccRel.keep()) &&
        !onlySameValueWrites(Stmt)) {
      DEBUG(dbgs() << "WRITE after WRITE to same element\n");
      OptimizationRemarkMissed R(PassName, "StoreAfterStore",
                                 MA->getAccessInstruction());
      R << "store after store of same element in same statement";
      R << " (previous stores: " << Stores;
      R << ", storing: " << AccRel << ")";
      S->getFunction().getContext().diagnose(R);

      IncompatibleElts = IncompatibleElts.add_set(ArrayElts);
    }

    Stores = Stores.unite(AccRel);
  }
}
387
/// Register the array read access @p MA: record which elements are read
/// (AllReads) and, for plain loads, which value instance is read
/// (AllReadValInst).
void ZoneAlgorithm::addArrayReadAccess(MemoryAccess *MA) {
  assert(MA->isLatestArrayKind());
  assert(MA->isRead());
  ScopStmt *Stmt = MA->getStatement();

  // { DomainRead[] -> Element[] }
  // Only elements of compatible arrays are tracked.
  auto AccRel = intersectRange(getAccessRelationFor(MA), CompatibleElts);
  AllReads = AllReads.add_map(AccRel);

  // Only plain LoadInsts yield a value instance we can reason about; other
  // access instructions (or none) are skipped.
  if (LoadInst *Load = dyn_cast_or_null<LoadInst>(MA->getAccessInstruction())) {
    // { DomainRead[] -> ValInst[] }
    isl::map LoadValInst = makeValInst(
        Load, Stmt, LI->getLoopFor(Load->getParent()), Stmt->isBlockStmt());

    // { DomainRead[] -> [Element[] -> DomainRead[]] }
    isl::map IncludeElement = AccRel.domain_map().curry();

    // { [Element[] -> DomainRead[]] -> ValInst[] }
    isl::map EltLoadValInst = LoadValInst.apply_domain(IncludeElement);

    AllReadValInst = AllReadValInst.add_map(EltLoadValInst);
  }
}
411
/// Return the value instance written by @p MA, if determinable.
///
/// @param MA     A must-write access; may-writes yield a null result since the
///               written value may not become the element's content.
/// @param AccRel { Domain[] -> Element[] } access relation of @p MA.
///
/// @return { Domain[] -> ValInst[] } for single-element stores of the
///         element's type and for zero-filling memsets; null otherwise.
isl::union_map ZoneAlgorithm::getWrittenValue(MemoryAccess *MA,
                                              isl::map AccRel) {
  if (!MA->isMustWrite())
    return {};

  Value *AccVal = MA->getAccessValue();
  ScopStmt *Stmt = MA->getStatement();
  Instruction *AccInst = MA->getAccessInstruction();

  // Write a value to a single element.
  auto L = MA->isOriginalArrayKind() ? LI->getLoopFor(AccInst->getParent())
                                     : Stmt->getSurroundingLoop();
  if (AccVal &&
      AccVal->getType() == MA->getLatestScopArrayInfo()->getElementType() &&
      AccRel.is_single_valued().is_true())
    return makeNormalizedValInst(AccVal, Stmt, L);

  // memset(_, '0', ) is equivalent to writing the null value to all touched
  // elements. isMustWrite() ensures that all of an element's bytes are
  // overwritten.
  if (auto *Memset = dyn_cast<MemSetInst>(AccInst)) {
    auto *WrittenConstant = dyn_cast<Constant>(Memset->getValue());
    Type *Ty = MA->getLatestScopArrayInfo()->getElementType();
    if (WrittenConstant && WrittenConstant->isZeroValue()) {
      Constant *Zero = Constant::getNullValue(Ty);
      return makeNormalizedValInst(Zero, Stmt, L);
    }
  }

  // Written value not representable.
  return {};
}
443
/// Register the array write access @p MA: record the written elements in
/// AllMustWrites/AllMayWrites and the written value instances in
/// AllWriteValInst.
void ZoneAlgorithm::addArrayWriteAccess(MemoryAccess *MA) {
  assert(MA->isLatestArrayKind());
  assert(MA->isWrite());
  auto *Stmt = MA->getStatement();

  // { Domain[] -> Element[] }
  // Only elements of compatible arrays are tracked.
  isl::map AccRel = intersectRange(getAccessRelationFor(MA), CompatibleElts);

  if (MA->isMustWrite())
    AllMustWrites = AllMustWrites.add_map(AccRel);

  if (MA->isMayWrite())
    AllMayWrites = AllMayWrites.add_map(AccRel);

  // { Domain[] -> ValInst[] }
  // Fall back to the unknown value if the written value is indeterminable.
  isl::union_map WriteValInstance = getWrittenValue(MA, AccRel);
  if (!WriteValInstance)
    WriteValInstance = makeUnknownForDomain(Stmt);

  // { Domain[] -> [Element[] -> Domain[]] }
  isl::map IncludeElement = AccRel.domain_map().curry();

  // { [Element[] -> DomainWrite[]] -> ValInst[] }
  isl::union_map EltWriteValInst =
      WriteValInstance.apply_domain(IncludeElement);

  AllWriteValInst = AllWriteValInst.unite(EltWriteValInst);
}
472
473/// Return whether @p PHI refers (also transitively through other PHIs) to
474/// itself.
475///
476/// loop:
477/// %phi1 = phi [0, %preheader], [%phi1, %loop]
478/// br i1 %c, label %loop, label %exit
479///
480/// exit:
481/// %phi2 = phi [%phi1, %bb]
482///
483/// In this example, %phi1 is recursive, but %phi2 is not.
484static bool isRecursivePHI(const PHINode *PHI) {
485 SmallVector<const PHINode *, 8> Worklist;
486 SmallPtrSet<const PHINode *, 8> Visited;
487 Worklist.push_back(PHI);
488
489 while (!Worklist.empty()) {
490 const PHINode *Cur = Worklist.pop_back_val();
491
492 if (Visited.count(Cur))
493 continue;
494 Visited.insert(Cur);
495
496 for (const Use &Incoming : Cur->incoming_values()) {
497 Value *IncomingVal = Incoming.get();
498 auto *IncomingPHI = dyn_cast<PHINode>(IncomingVal);
499 if (!IncomingPHI)
500 continue;
501
502 if (IncomingPHI == PHI)
503 return true;
504 Worklist.push_back(IncomingPHI);
505 }
506 }
507 return false;
508}
509
/// For a PHI SCoP array @p SAI, compute which incoming write reaches each
/// read of the PHI: { DomainPHIRead[] -> DomainPHIWrite[] }.
///
/// The reaching write is the lexicographically last incoming-block timepoint
/// before the read. Results are cached per PHI in PerPHIMaps.
isl::union_map ZoneAlgorithm::computePerPHI(const ScopArrayInfo *SAI) {
  // TODO: If the PHI has an incoming block from before the SCoP, it is not
  // represented in any ScopStmt.

  auto *PHI = cast<PHINode>(SAI->getBasePtr());
  // Return the memoized result if this PHI was computed before.
  auto It = PerPHIMaps.find(PHI);
  if (It != PerPHIMaps.end())
    return It->second;

  assert(SAI->isPHIKind());

  // { DomainPHIWrite[] -> Scatter[] }
  isl::union_map PHIWriteScatter = makeEmptyUnionMap();

  // Collect all incoming block timepoints.
  for (MemoryAccess *MA : S->getPHIIncomings(SAI)) {
    isl::map Scatter = getScatterFor(MA);
    PHIWriteScatter = PHIWriteScatter.add_map(Scatter);
  }

  // { DomainPHIRead[] -> Scatter[] }
  isl::map PHIReadScatter = getScatterFor(S->getPHIRead(SAI));

  // { DomainPHIRead[] -> Scatter[] }
  // All timepoints strictly before (and including, per the 'true' argument's
  // inclusivity handling in beforeScatter) the read.
  isl::map BeforeRead = beforeScatter(PHIReadScatter, true);

  // { Scatter[] }
  isl::set WriteTimes = singleton(PHIWriteScatter.range(), ScatterSpace);

  // { DomainPHIRead[] -> Scatter[] }
  // Of the write timepoints before each read, the last one is the write whose
  // value the PHI read observes.
  isl::map PHIWriteTimes = BeforeRead.intersect_range(WriteTimes);
  isl::map LastPerPHIWrites = PHIWriteTimes.lexmax();

  // { DomainPHIRead[] -> DomainPHIWrite[] }
  isl::union_map Result =
      isl::union_map(LastPerPHIWrites).apply_range(PHIWriteScatter.reverse());
  // Each read sees exactly one write, and each write feeds at most one read.
  assert(!Result.is_single_valued().is_false());
  assert(!Result.is_injective().is_false());

  PerPHIMaps.insert({PHI, Result});
  return Result;
}
552
553isl::union_set ZoneAlgorithm::makeEmptyUnionSet() const {
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000554 return isl::union_set::empty(ParamSpace);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000555}
556
557isl::union_map ZoneAlgorithm::makeEmptyUnionMap() const {
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000558 return isl::union_map::empty(ParamSpace);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000559}
560
void ZoneAlgorithm::collectCompatibleElts() {
  // First find all the incompatible elements, then take the complement.
  // We compile the list of compatible (rather than incompatible) elements so
  // users can intersect with the list, not requiring a subtract operation. It
  // also allows us to define a 'universe' of all elements and makes it more
  // explicit in which array elements can be used.
  isl::union_set AllElts = makeEmptyUnionSet();
  isl::union_set IncompatibleElts = makeEmptyUnionSet();

  // Each statement votes arrays into AllElts and possibly IncompatibleElts.
  for (auto &Stmt : *S)
    collectIncompatibleElts(&Stmt, IncompatibleElts, AllElts);

  // Update pass statistics; n_set counts the per-array sets in each union.
  NumIncompatibleArrays += isl_union_set_n_set(IncompatibleElts.keep());
  CompatibleElts = AllElts.subtract(IncompatibleElts);
  NumCompatibleArrays += isl_union_set_n_set(CompatibleElts.keep());
}
577
578isl::map ZoneAlgorithm::getScatterFor(ScopStmt *Stmt) const {
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000579 isl::space ResultSpace =
580 Stmt->getDomainSpace().map_from_domain_and_range(ScatterSpace);
581 return Schedule.extract_map(ResultSpace);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000582}
583
584isl::map ZoneAlgorithm::getScatterFor(MemoryAccess *MA) const {
585 return getScatterFor(MA->getStatement());
586}
587
588isl::union_map ZoneAlgorithm::getScatterFor(isl::union_set Domain) const {
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000589 return Schedule.intersect_domain(Domain);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000590}
591
/// Get the schedule of the single-space domain @p Domain as one isl::map.
isl::map ZoneAlgorithm::getScatterFor(isl::set Domain) const {
  // { Domain[] -> Scatter[] } space the extracted map must live in.
  auto ResultSpace = Domain.get_space().map_from_domain_and_range(ScatterSpace);
  auto UDomain = isl::union_set(Domain);
  // Compute in union form, then extract the single map.
  auto UResult = getScatterFor(std::move(UDomain));
  auto Result = singleton(std::move(UResult), std::move(ResultSpace));
  // Every instance of @p Domain must be scheduled (unless extraction failed).
  assert(!Result || Result.domain().is_equal(Domain) == isl_bool_true);
  return Result;
}
600
601isl::set ZoneAlgorithm::getDomainFor(ScopStmt *Stmt) const {
Tobias Grosserdcf8d692017-08-06 16:39:52 +0000602 return Stmt->getDomain().remove_redundancies();
Michael Kruse138a3fb2017-08-04 22:51:23 +0000603}
604
605isl::set ZoneAlgorithm::getDomainFor(MemoryAccess *MA) const {
606 return getDomainFor(MA->getStatement());
607}
608
609isl::map ZoneAlgorithm::getAccessRelationFor(MemoryAccess *MA) const {
610 auto Domain = getDomainFor(MA);
611 auto AccRel = MA->getLatestAccessRelation();
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000612 return AccRel.intersect_domain(Domain);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000613}
614
/// Get (and cache) the scalar reaching definition of @p Stmt:
/// { Scatter[] -> DomainWrite[] }.
isl::map ZoneAlgorithm::getScalarReachingDefinition(ScopStmt *Stmt) {
  // Result is a reference into the cache map; assigning it below populates
  // the cache entry in place.
  auto &Result = ScalarReachDefZone[Stmt];
  if (Result)
    return Result;

  auto Domain = getDomainFor(Stmt);
  // Definition timepoint excluded, overwrite timepoint included.
  Result = computeScalarReachingDefinition(Schedule, Domain, false, true);
  simplify(Result);

  return Result;
}
626
/// Get the scalar reaching definition restricted to the write instances in
/// @p DomainDef.
isl::map ZoneAlgorithm::getScalarReachingDefinition(isl::set DomainDef) {
  // Recover the ScopStmt from the tuple id's user pointer (see the Domain[]
  // annotation convention in the file header).
  auto DomId = give(isl_set_get_tuple_id(DomainDef.keep()));
  auto *Stmt = static_cast<ScopStmt *>(isl_id_get_user(DomId.keep()));

  auto StmtResult = getScalarReachingDefinition(Stmt);

  // Restrict the statement-wide result to the requested instances.
  return give(isl_map_intersect_range(StmtResult.take(), DomainDef.take()));
}
635
636isl::map ZoneAlgorithm::makeUnknownForDomain(ScopStmt *Stmt) const {
637 return ::makeUnknownForDomain(getDomainFor(Stmt));
638}
639
/// Get (and cache) the isl_id representing @p V (named "Val_<name>", user
/// pointer set to @p V). Returns a null id for a null value.
isl::id ZoneAlgorithm::makeValueId(Value *V) {
  if (!V)
    return nullptr;

  // Id is a reference into the cache; a null entry means V is seen first.
  auto &Id = ValueIds[V];
  if (Id.is_null()) {
    // ValueIds.size() - 1 yields a unique suffix for otherwise-unnamed values.
    auto Name = getIslCompatibleName("Val_", V, ValueIds.size() - 1,
                                     std::string(), UseInstructionNames);
    Id = isl::id::alloc(IslCtx.get(), Name.c_str(), V);
  }
  return Id;
}
652
653isl::space ZoneAlgorithm::makeValueSpace(Value *V) {
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000654 auto Result = ParamSpace.set_from_params();
655 return Result.set_tuple_id(isl::dim::set, makeValueId(V));
Michael Kruse138a3fb2017-08-04 22:51:23 +0000656}
657
658isl::set ZoneAlgorithm::makeValueSet(Value *V) {
659 auto Space = makeValueSpace(V);
Tobias Grosser2f549fd2018-04-28 21:22:17 +0000660 return isl::set::universe(Space);
Michael Kruse138a3fb2017-08-04 22:51:23 +0000661}
662
/// Map the instances of @p UserStmt to the ValInst of @p Val as seen from
/// within that statement: { DomainUse[] -> ValInst[] }.
///
/// The shape of the result depends on how the value is used (VirtualUse kind):
/// statement-invariant values map to a plain Val[] tuple, synthesizable values
/// to a SCEV-tagged tuple over the use domain, and values defined in another
/// statement to the wrapped [DomainDef[] -> Val[]] of their reaching
/// definition. If @p IsCertain is false, the access may not execute, so the
/// value is conservatively treated as unknown.
isl::map ZoneAlgorithm::makeValInst(Value *Val, ScopStmt *UserStmt, Loop *Scope,
                                    bool IsCertain) {
  // If the definition/write is conditional, the value at the location could
  // be either the written value or the old value. Since we cannot know which
  // one, consider the value to be unknown.
  if (!IsCertain)
    return makeUnknownForDomain(UserStmt);

  auto DomainUse = getDomainFor(UserStmt);
  auto VUse = VirtualUse::create(S, UserStmt, Scope, Val, true);
  switch (VUse.getKind()) {
  case VirtualUse::Constant:
  case VirtualUse::Block:
  case VirtualUse::Hoisted:
  case VirtualUse::ReadOnly: {
    // The definition does not depend on the statement which uses it.
    auto ValSet = makeValueSet(Val);
    return isl::map::from_domain_and_range(DomainUse, ValSet);
  }

  case VirtualUse::Synthesizable: {
    auto *ScevExpr = VUse.getScevExpr();
    auto UseDomainSpace = DomainUse.get_space();

    // Construct the SCEV space.
    // TODO: Add only the induction variables referenced in SCEVAddRecExpr
    // expressions, not just all of them.
    auto ScevId = isl::manage(isl_id_alloc(
        UseDomainSpace.get_ctx().get(), nullptr, const_cast<SCEV *>(ScevExpr)));

    // Same dimensionality as the use domain, but tagged with the SCEV
    // expression so equal SCEVs compare equal as ValInsts.
    auto ScevSpace = UseDomainSpace.drop_dims(isl::dim::set, 0, 0);
    ScevSpace = ScevSpace.set_tuple_id(isl::dim::set, ScevId);

    // { DomainUse[] -> ScevExpr[] }
    auto ValInst =
        isl::map::identity(UseDomainSpace.map_from_domain_and_range(ScevSpace));
    return ValInst;
  }

  case VirtualUse::Intra: {
    // Definition and use is in the same statement. We do not need to compute
    // a reaching definition.

    // { llvm::Value }
    auto ValSet = makeValueSet(Val);

    // { UserDomain[] -> llvm::Value }
    auto ValInstSet =
        give(isl_map_from_domain_and_range(DomainUse.take(), ValSet.take()));

    // { UserDomain[] -> [UserDomain[] -> llvm::Value] }
    auto Result = give(isl_map_reverse(isl_map_domain_map(ValInstSet.take())));
    simplify(Result);
    return Result;
  }

  case VirtualUse::Inter: {
    // The value is defined in a different statement.

    auto *Inst = cast<Instruction>(Val);
    auto *ValStmt = S->getStmtFor(Inst);

    // If the llvm::Value is defined in a removed Stmt, we cannot derive its
    // domain. We could use an arbitrary statement, but this could result in
    // different ValInst[] for the same llvm::Value.
    if (!ValStmt)
      return ::makeUnknownForDomain(DomainUse);

    // { DomainDef[] }
    auto DomainDef = getDomainFor(ValStmt);

    // { Scatter[] -> DomainDef[] }
    auto ReachDef = getScalarReachingDefinition(DomainDef);

    // { DomainUse[] -> Scatter[] }
    auto UserSched = getScatterFor(DomainUse);

    // { DomainUse[] -> DomainDef[] }
    auto UsedInstance =
        give(isl_map_apply_range(UserSched.take(), ReachDef.take()));

    // { llvm::Value }
    auto ValSet = makeValueSet(Val);

    // { DomainUse[] -> llvm::Value[] }
    auto ValInstSet =
        give(isl_map_from_domain_and_range(DomainUse.take(), ValSet.take()));

    // { DomainUse[] -> [DomainDef[] -> llvm::Value] }
    auto Result =
        give(isl_map_range_product(UsedInstance.take(), ValInstSet.take()));

    simplify(Result);
    return Result;
  }
  }
  llvm_unreachable("Unhandled use type");
}
761
/// Remove all computed PHIs out of @p Input and replace them by their
/// incoming value.
///
/// Non-wrapped range tuples and ValInsts of PHIs not in @p ComputedPHIs pass
/// through unchanged.
///
/// @param Input        { [] -> ValInst[] }
/// @param ComputedPHIs Set of PHIs that are replaced. Their ValInsts must
///                     appear on the LHS of @p NormalizeMap.
/// @param NormalizeMap { ValInst[] -> ValInst[] }
static isl::union_map normalizeValInst(isl::union_map Input,
                                       const DenseSet<PHINode *> &ComputedPHIs,
                                       isl::union_map NormalizeMap) {
  isl::union_map Result = isl::union_map::empty(Input.get_space());
  Input.foreach_map(
      [&Result, &ComputedPHIs, &NormalizeMap](isl::map Map) -> isl::stat {
        isl::space Space = Map.get_space();
        isl::space RangeSpace = Space.range();

        // Instructions within the SCoP are always wrapped. Non-wrapped tuples
        // are therefore invariant in the SCoP and don't need normalization.
        if (!RangeSpace.is_wrapping()) {
          Result = Result.add_map(Map);
          return isl::stat::ok;
        }

        // The wrapped range's out-tuple id carries the llvm::Value pointer;
        // only PHINodes in ComputedPHIs are subject to replacement.
        auto *PHI = dyn_cast<PHINode>(static_cast<Value *>(
            RangeSpace.unwrap().get_tuple_id(isl::dim::out).get_user()));

        // If no normalization is necessary, then the ValInst stands for itself.
        if (!ComputedPHIs.count(PHI)) {
          Result = Result.add_map(Map);
          return isl::stat::ok;
        }

        // Otherwise, apply the normalization.
        isl::union_map Mapped = isl::union_map(Map).apply_range(NormalizeMap);
        Result = Result.unite(Mapped);
        NumPHINormialization++;
        return isl::stat::ok;
      });
  return Result;
}
802
803isl::union_map ZoneAlgorithm::makeNormalizedValInst(llvm::Value *Val,
804 ScopStmt *UserStmt,
805 llvm::Loop *Scope,
806 bool IsCertain) {
807 isl::map ValInst = makeValInst(Val, UserStmt, Scope, IsCertain);
808 isl::union_map Normalized =
809 normalizeValInst(ValInst, ComputedPHIs, NormalizeMap);
810 return Normalized;
811}
812
Michael Kruse47281842017-08-28 20:39:07 +0000813bool ZoneAlgorithm::isCompatibleAccess(MemoryAccess *MA) {
814 if (!MA)
815 return false;
816 if (!MA->isLatestArrayKind())
817 return false;
818 Instruction *AccInst = MA->getAccessInstruction();
819 return isa<StoreInst>(AccInst) || isa<LoadInst>(AccInst);
820}
821
Michael Kruse68821a82017-10-31 16:11:46 +0000822bool ZoneAlgorithm::isNormalizable(MemoryAccess *MA) {
823 assert(MA->isRead());
824
825 // Exclude ExitPHIs, we are assuming that a normalizable PHI has a READ
826 // MemoryAccess.
827 if (!MA->isOriginalPHIKind())
828 return false;
829
830 // Exclude recursive PHIs, normalizing them would require a transitive
831 // closure.
832 auto *PHI = cast<PHINode>(MA->getAccessInstruction());
833 if (RecursivePHIs.count(PHI))
834 return false;
835
836 // Ensure that each incoming value can be represented by a ValInst[].
837 // We do represent values from statements associated to multiple incoming
838 // value by the PHI itself, but we do not handle this case yet (especially
839 // isNormalized()) when normalizing.
840 const ScopArrayInfo *SAI = MA->getOriginalScopArrayInfo();
841 auto Incomings = S->getPHIIncomings(SAI);
842 for (MemoryAccess *Incoming : Incomings) {
843 if (Incoming->getIncoming().size() != 1)
844 return false;
845 }
846
847 return true;
848}
849
850bool ZoneAlgorithm::isNormalized(isl::map Map) {
851 isl::space Space = Map.get_space();
852 isl::space RangeSpace = Space.range();
853
854 if (!RangeSpace.is_wrapping())
855 return true;
856
857 auto *PHI = dyn_cast<PHINode>(static_cast<Value *>(
858 RangeSpace.unwrap().get_tuple_id(isl::dim::out).get_user()));
859 if (!PHI)
860 return true;
861
862 auto *IncomingStmt = static_cast<ScopStmt *>(
863 RangeSpace.unwrap().get_tuple_id(isl::dim::in).get_user());
864 MemoryAccess *PHIRead = IncomingStmt->lookupPHIReadOf(PHI);
865 if (!isNormalizable(PHIRead))
866 return true;
867
868 return false;
869}
870
871bool ZoneAlgorithm::isNormalized(isl::union_map UMap) {
872 auto Result = UMap.foreach_map([this](isl::map Map) -> isl::stat {
873 if (isNormalized(Map))
874 return isl::stat::ok;
875 return isl::stat::error;
876 });
877 return Result == isl::stat::ok;
878}
879
/// Collect the access relations shared by all zone-based analyses: per-array
/// read/write maps, their ValInsts, and the reaching-definition zones of the
/// writes. Resets any previously computed PHI normalization state.
void ZoneAlgorithm::computeCommon() {
  AllReads = makeEmptyUnionMap();
  AllMayWrites = makeEmptyUnionMap();
  AllMustWrites = makeEmptyUnionMap();
  AllWriteValInst = makeEmptyUnionMap();
  AllReadValInst = makeEmptyUnionMap();

  // Default to empty, i.e. no normalization/replacement is taking place. Call
  // computeNormalizedPHIs() to initialize.
  NormalizeMap = makeEmptyUnionMap();
  ComputedPHIs.clear();

  // Only the latest (possibly transformed) array accesses participate;
  // scalar accesses are handled via ValInsts instead.
  for (auto &Stmt : *S) {
    for (auto *MA : Stmt) {
      if (!MA->isLatestArrayKind())
        continue;

      if (MA->isRead())
        addArrayReadAccess(MA);

      if (MA->isWrite())
        addArrayWriteAccess(MA);
    }
  }

  // { DomainWrite[] -> Element[] }
  AllWrites =
      give(isl_union_map_union(AllMustWrites.copy(), AllMayWrites.copy()));

  // { [Element[] -> Zone[]] -> DomainWrite[] }
  WriteReachDefZone =
      computeReachingDefinition(Schedule, AllWrites, false, true);
  simplify(WriteReachDefZone);
}
914
/// Discover all normalizable PHIs and build NormalizeMap/ComputedPHIs such
/// that every PHI ValInst can be substituted by the ValInsts of its incoming
/// values, resolving chains of PHIs transitively as they are found.
void ZoneAlgorithm::computeNormalizedPHIs() {
  // Determine which PHIs can reference themselves. They are excluded from
  // normalization to avoid problems with transitive closures.
  for (ScopStmt &Stmt : *S) {
    for (MemoryAccess *MA : Stmt) {
      if (!MA->isPHIKind())
        continue;
      if (!MA->isRead())
        continue;

      // TODO: Can be more efficient since isRecursivePHI can theoretically
      // determine recursiveness for multiple values and/or cache results.
      auto *PHI = cast<PHINode>(MA->getAccessInstruction());
      if (isRecursivePHI(PHI)) {
        NumRecursivePHIs++;
        RecursivePHIs.insert(PHI);
      }
    }
  }

  // { PHIValInst[] -> IncomingValInst[] }
  isl::union_map AllPHIMaps = makeEmptyUnionMap();

  // Discover new PHIs and try to normalize them.
  DenseSet<PHINode *> AllPHIs;
  for (ScopStmt &Stmt : *S) {
    for (MemoryAccess *MA : Stmt) {
      if (!MA->isOriginalPHIKind())
        continue;
      if (!MA->isRead())
        continue;
      if (!isNormalizable(MA))
        continue;

      auto *PHI = cast<PHINode>(MA->getAccessInstruction());
      const ScopArrayInfo *SAI = MA->getOriginalScopArrayInfo();

      // { PHIDomain[] -> PHIValInst[] }
      isl::map PHIValInst = makeValInst(PHI, &Stmt, Stmt.getSurroundingLoop());

      // { IncomingDomain[] -> IncomingValInst[] }
      isl::union_map IncomingValInsts = makeEmptyUnionMap();

      // Get all incoming values.
      for (MemoryAccess *MA : S->getPHIIncomings(SAI)) {
        ScopStmt *IncomingStmt = MA->getStatement();

        auto Incoming = MA->getIncoming();
        assert(Incoming.size() == 1 && "The incoming value must be "
                                       "representable by something else than "
                                       "the PHI itself");
        Value *IncomingVal = Incoming[0].second;

        // { IncomingDomain[] -> IncomingValInst[] }
        isl::map IncomingValInst = makeValInst(
            IncomingVal, IncomingStmt, IncomingStmt->getSurroundingLoop());

        IncomingValInsts = IncomingValInsts.add_map(IncomingValInst);
      }

      // Determine which instance of the PHI statement corresponds to which
      // incoming value.
      // { PHIDomain[] -> IncomingDomain[] }
      isl::union_map PerPHI = computePerPHI(SAI);

      // { PHIValInst[] -> IncomingValInst[] }
      isl::union_map PHIMap =
          PerPHI.apply_domain(PHIValInst).apply_range(IncomingValInsts);
      assert(!PHIMap.is_single_valued().is_false());

      // Resolve transitiveness: The incoming value of the newly discovered PHI
      // may reference a previously normalized PHI. At the same time, already
      // normalized PHIs might be normalized to the new PHI. At the end, none of
      // the PHIs may appear on the right-hand-side of the normalization map.
      PHIMap = normalizeValInst(PHIMap, AllPHIs, AllPHIMaps);
      AllPHIs.insert(PHI);
      AllPHIMaps = normalizeValInst(AllPHIMaps, AllPHIs, PHIMap);

      AllPHIMaps = AllPHIMaps.unite(PHIMap);
      NumNormalizablePHIs++;
    }
  }
  simplify(AllPHIMaps);

  // Apply the normalization.
  ComputedPHIs = AllPHIs;
  NormalizeMap = AllPHIMaps;

  assert(!NormalizeMap || isNormalized(NormalizeMap));
}
1005
Michael Kruse138a3fb2017-08-04 22:51:23 +00001006void ZoneAlgorithm::printAccesses(llvm::raw_ostream &OS, int Indent) const {
1007 OS.indent(Indent) << "After accesses {\n";
1008 for (auto &Stmt : *S) {
1009 OS.indent(Indent + 4) << Stmt.getBaseName() << "\n";
1010 for (auto *MA : Stmt)
1011 MA->print(OS);
1012 }
1013 OS.indent(Indent) << "}\n";
1014}
Michael Kruse70af4f52017-08-07 18:40:29 +00001015
1016isl::union_map ZoneAlgorithm::computeKnownFromMustWrites() const {
1017 // { [Element[] -> Zone[]] -> [Element[] -> DomainWrite[]] }
1018 isl::union_map EltReachdDef = distributeDomain(WriteReachDefZone.curry());
1019
1020 // { [Element[] -> DomainWrite[]] -> ValInst[] }
1021 isl::union_map AllKnownWriteValInst = filterKnownValInst(AllWriteValInst);
1022
1023 // { [Element[] -> Zone[]] -> ValInst[] }
1024 return EltReachdDef.apply_range(AllKnownWriteValInst);
1025}
1026
/// Derive the content known to be in each element/zone from the loads: a read
/// observes the value written by the write reaching it, so that value is
/// known for the entire zone governed by that reaching definition.
isl::union_map ZoneAlgorithm::computeKnownFromLoad() const {
  // { Element[] }
  isl::union_set AllAccessedElts = AllReads.range().unite(AllWrites.range());

  // { Element[] -> Scatter[] }
  isl::union_map EltZoneUniverse = isl::union_map::from_domain_and_range(
      AllAccessedElts, isl::set::universe(ScatterSpace));

  // This assumes there are no "holes" in
  // isl_union_map_domain(WriteReachDefZone); alternatively, compute the zone
  // before the first write or that are not written at all.
  // { Element[] -> Scatter[] }
  isl::union_set NonReachDef =
      EltZoneUniverse.wrap().subtract(WriteReachDefZone.domain());

  // { [Element[] -> Zone[]] -> ReachDefId[] }
  isl::union_map DefZone =
      WriteReachDefZone.unite(isl::union_map::from_domain(NonReachDef));

  // { [Element[] -> Scatter[]] -> Element[] }
  isl::union_map EltZoneElt = EltZoneUniverse.domain_map();

  // { [Element[] -> Zone[]] -> [Element[] -> ReachDefId[]] }
  isl::union_map DefZoneEltDefId = EltZoneElt.range_product(DefZone);

  // { Element[] -> [Zone[] -> ReachDefId[]] }
  isl::union_map EltDefZone = DefZone.curry();

  // { [Element[] -> Zone[] -> [Element[] -> ReachDefId[]] }
  isl::union_map EltZoneEltDefid = distributeDomain(EltDefZone);

  // { [Element[] -> Scatter[]] -> DomainRead[] }
  isl::union_map Reads = AllReads.range_product(Schedule).reverse();

  // { [Element[] -> Scatter[]] -> [Element[] -> DomainRead[]] }
  isl::union_map ReadsElt = EltZoneElt.range_product(Reads);

  // { [Element[] -> Scatter[]] -> ValInst[] }
  isl::union_map ScatterKnown = ReadsElt.apply_range(AllReadValInst);

  // { [Element[] -> ReachDefId[]] -> ValInst[] }
  isl::union_map DefidKnown =
      DefZoneEltDefId.apply_domain(ScatterKnown).reverse();

  // { [Element[] -> Zone[]] -> ValInst[] }
  return DefZoneEltDefId.apply_range(DefidKnown);
}
1074
1075isl::union_map ZoneAlgorithm::computeKnown(bool FromWrite,
1076 bool FromRead) const {
1077 isl::union_map Result = makeEmptyUnionMap();
1078
1079 if (FromWrite)
1080 Result = Result.unite(computeKnownFromMustWrites());
1081
1082 if (FromRead)
1083 Result = Result.unite(computeKnownFromLoad());
1084
1085 simplify(Result);
1086 return Result;
1087}