//===------ ZoneAlgo.cpp ----------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Derive information about array elements between statements ("Zones").
//
// The algorithms here work on the scatter space - the image space of the
// schedule returned by Scop::getSchedule(). We call an element in that space a
// "timepoint". Timepoints are lexicographically ordered such that we can
// define ranges in the scatter space. We use two flavors of such ranges:
// Timepoint sets and zones. A timepoint set is simply a subset of the scatter
// space and is directly stored as isl_set.
//
// Zones are used to describe the space between timepoints as open sets, i.e.
// they do not contain the extrema. Using isl rational sets to express these
// would be overkill. We also cannot store them as the integer timepoints they
// contain; the (nonempty) zone between 1 and 2 would be empty and
// indistinguishable from e.g. the zone between 3 and 4. Also, we cannot store
// the integer set including the extrema; the set ]1,2[ + ]3,4[ could be
// coalesced to ]1,3[, although we defined the range [2,3] to be not in the set.
// Instead, we store the "half-open" integer extrema, including the lower bound,
// but excluding the upper bound. Examples:
//
// * The set { [i] : 1 <= i <= 3 } represents the zone ]0,3[ (which contains the
//   integer points 1 and 2, but not 0 or 3)
//
// * { [1] } represents the zone ]0,1[
//
// * { [i] : i = 1 or i = 3 } represents the zone ]0,1[ + ]2,3[
//
// Therefore, an integer i in the set represents the zone ]i-1,i[, i.e. strictly
// speaking the integer points never belong to the zone. However, depending on
// the interpretation, one might want to include them. Part of the
// interpretation may not be known when the zone is constructed.
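//
// For example, under an interpretation that includes the lower extremum, the
// zone { [1] } (i.e. ]0,1[) corresponds to the timepoint 0; an interpretation
// that includes the upper extremum yields the timepoint 1 instead.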
//
// Reads are assumed to always take place before writes, hence we can think of
// reads taking place at the beginning of a timepoint and writes at the end.
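//
// For instance, if a statement computes A[i] = A[i] + 1, the load of A[i] is
// considered to happen at the beginning of that statement's timepoint and the
// store of the new value at its end, so both can share one timepoint.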
//
// Let's assume that the zone represents the lifetime of a variable. That is,
// the zone begins with a write that defines the value during its lifetime and
// ends with the last read of that value. In the following we consider whether a
// read/write at the beginning/ending of the lifetime zone should be within the
// zone or outside of it.
//
// * A read at the timepoint that starts the live-range loads the previous
//   value. Hence, exclude the timepoint starting the zone.
//
// * For a write at the timepoint that starts the live-range, it is not defined
//   whether it occurs before or after the write that starts the lifetime. We do
//   not allow this situation to occur. Hence, we include the timepoint starting
//   the zone to determine whether they are conflicting.
//
// * A read at the timepoint that ends the live-range reads the same variable.
//   We include the timepoint at the end of the zone to include that read into
//   the live-range. Doing otherwise would mean that the two reads access
//   different values, which would mean that the values they read are both alive
//   at the same time but occupy the same variable.
//
// * A write at the timepoint that ends the live-range starts a new live-range.
//   It must not be included in the live-range of the previous definition.
//
// All combinations of reads and writes at the endpoints are possible, but most
// of the time only the write->read (for instance, a live-range from definition
// to last use) and read->write (for instance, an unused range from last use to
// overwrite) combinations are interesting (half-open ranges). write->write
// zones might be useful as well in some contexts to represent
// output-dependencies.
//
// @see convertZoneToTimepoints
//
//
// The code makes use of maps and sets in many different spaces. To not lose
// track of the space in which a set or map is expected to be, variables holding
// an isl reference are usually annotated in the comments. They roughly follow
// isl syntax for spaces, but only the tuples, not the dimensions. The tuples
// have a meaning as follows:
//
// * Space[] - An unspecified tuple. Used for function parameters such that the
//   function caller can use it for anything they like.
//
// * Domain[] - A statement instance as returned by ScopStmt::getDomain()
//   isl_id_get_name: Stmt_<NameOfBasicBlock>
//   isl_id_get_user: Pointer to ScopStmt
//
// * Element[] - An array element as in the range part of
//   MemoryAccess::getAccessRelation()
//   isl_id_get_name: MemRef_<NameOfArrayVariable>
//   isl_id_get_user: Pointer to ScopArrayInfo
//
// * Scatter[] - Scatter space or space of timepoints
//   Has no tuple id
//
// * Zone[] - Range between timepoints as described above
//   Has no tuple id
//
// * ValInst[] - An llvm::Value as defined at a specific timepoint.
//
//   A ValInst[] itself can be structured as one of:
//
//   * [] - An unknown value.
//     Always zero dimensions
//     Has no tuple id
//
//   * Value[] - An llvm::Value that is read-only in the SCoP, i.e. its
//     runtime content does not depend on the timepoint.
//     Always zero dimensions
//     isl_id_get_name: Val_<NameOfValue>
//     isl_id_get_user: A pointer to an llvm::Value
//
//   * SCEV[...] - A synthesizable llvm::SCEV Expression.
//     In contrast to a Value[] it has at least one dimension per
//     SCEVAddRecExpr in the SCEV.
//
//   * [Domain[] -> Value[]] - An llvm::Value that may change during the
//     Scop's execution.
//     The tuple itself has no id, but it wraps a map space holding a
//     statement instance which defines the llvm::Value as the map's domain
//     and llvm::Value itself as range.
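//
//     For example (using hypothetical names), the value %val defined in the
//     statement Stmt_bb, as seen from its instance Stmt_bb[i], would be
//     written as the ValInst [Stmt_bb[i] -> Val_val[]].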
//
// @see makeValInst()
//
// An annotation "{ Domain[] -> Scatter[] }" therefore means: A map from a
// statement instance to a timepoint, aka a schedule. There is only one scatter
// space, but most of the time multiple statements are processed in one set.
// This is why most of the time isl_union_map has to be used.
//
// The basic algorithm works as follows:
// At first we verify that the SCoP is compatible with this technique. For
// instance, two writes cannot write to the same location at the same statement
// instance because we cannot determine within the polyhedral model which one
// comes first. Once this is verified, we compute zones at which an array
// element is unused. This computation can fail if it takes too long. Then the
// main algorithm is executed. Because every store potentially trails an unused
// zone, we start at stores. We search for a scalar (MemoryKind::Value or
// MemoryKind::PHI) that we can map to the array element overwritten by the
// store, preferably one that is used by the store or at least the ScopStmt.
// When it does not conflict with the lifetime of the values in the array
// element, the map is applied and the unused zone updated as it is now used. We
// continue to try to map scalars to the array element until there are no more
// candidates to map. The algorithm is greedy in the sense that the first scalar
// not conflicting will be mapped. Other scalars processed later that could have
// fit the same unused zone will be rejected. As such the result depends on the
// processing order.
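//
// A rough sketch of this greedy mapping in pseudocode (the actual driver is
// implemented in the passes built on top of this class, such as DeLICM):
//
//   for each compatible store St:
//     for each candidate scalar s usable at St:
//       if the lifetime of s does not conflict with the unused zone before St:
//         map s to the array element overwritten by St
//         mark that zone as used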
//
//===----------------------------------------------------------------------===//

#include "polly/ZoneAlgo.h"
#include "polly/ScopInfo.h"
#include "polly/Support/GICHelper.h"
#include "polly/Support/ISLTools.h"
#include "polly/Support/VirtualInstruction.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/raw_ostream.h"

#define DEBUG_TYPE "polly-zone"

STATISTIC(NumIncompatibleArrays, "Number of not zone-analyzable arrays");
STATISTIC(NumCompatibleArrays, "Number of zone-analyzable arrays");
STATISTIC(NumRecursivePHIs, "Number of recursive PHIs");
STATISTIC(NumNormalizablePHIs, "Number of normalizable PHIs");
STATISTIC(NumPHINormalization, "Number of executed PHI normalizations");

using namespace polly;
using namespace llvm;

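/// Compute the reaching definition of array elements.
///
/// The reaching definition of an element at a timepoint is the write access
/// that last wrote to that element before the timepoint. This is a thin
/// wrapper around computeReachingWrite().
///
/// @param Schedule  { DomainWrite[] -> Scatter[] }
/// @param Writes    { DomainWrite[] -> Element[] }
/// @param InclDef   Include the timepoint of the definition to the result.
/// @param InclRedef Include the timepoint of the overwrite into the result.
///
/// @return { [Element[] -> Scatter[]] -> DomainWrite[] }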
static isl::union_map computeReachingDefinition(isl::union_map Schedule,
                                                isl::union_map Writes,
                                                bool InclDef, bool InclRedef) {
  return computeReachingWrite(Schedule, Writes, false, InclDef, InclRedef);
}

/// Compute the reaching definition of a scalar.
///
/// Compared to computeReachingDefinition, there is just one element which is
/// accessed and therefore only a set of instances that access that element is
/// required.
///
/// @param Schedule  { DomainWrite[] -> Scatter[] }
/// @param Writes    { DomainWrite[] }
/// @param InclDef   Include the timepoint of the definition to the result.
/// @param InclRedef Include the timepoint of the overwrite into the result.
///
/// @return { Scatter[] -> DomainWrite[] }
static isl::union_map computeScalarReachingDefinition(isl::union_map Schedule,
                                                      isl::union_set Writes,
                                                      bool InclDef,
                                                      bool InclRedef) {
  // { DomainWrite[] -> Element[] }
  isl::union_map Defs = isl::union_map::from_domain(Writes);

  // { [Element[] -> Scatter[]] -> DomainWrite[] }
  auto ReachDefs =
      computeReachingDefinition(Schedule, Defs, InclDef, InclRedef);

  // { Scatter[] -> DomainWrite[] }
  return ReachDefs.curry().range().unwrap();
}

/// Compute the reaching definition of a scalar.
///
/// This overload accepts only a single writing statement as an isl_map,
/// consequently the result also is only a single isl_map.
///
/// @param Schedule  { DomainWrite[] -> Scatter[] }
/// @param Writes    { DomainWrite[] }
/// @param InclDef   Include the timepoint of the definition to the result.
/// @param InclRedef Include the timepoint of the overwrite into the result.
///
/// @return { Scatter[] -> DomainWrite[] }
static isl::map computeScalarReachingDefinition(isl::union_map Schedule,
                                                isl::set Writes, bool InclDef,
                                                bool InclRedef) {
  isl::space DomainSpace = Writes.get_space();
  isl::space ScatterSpace = getScatterSpace(Schedule);

  // { Scatter[] -> DomainWrite[] }
  isl::union_map UMap = computeScalarReachingDefinition(
      Schedule, isl::union_set(Writes), InclDef, InclRedef);

  isl::space ResultSpace = ScatterSpace.map_from_domain_and_range(DomainSpace);
  return singleton(UMap, ResultSpace);
}

isl::union_map polly::makeUnknownForDomain(isl::union_set Domain) {
  return isl::union_map::from_domain(Domain);
}

/// Create a domain-to-unknown value mapping.
///
/// @see makeUnknownForDomain(isl::union_set)
///
/// @param Domain { Domain[] }
///
/// @return { Domain[] -> ValInst[] }
static isl::map makeUnknownForDomain(isl::set Domain) {
  return isl::map::from_domain(Domain);
}

/// Return whether @p Map maps to an unknown value.
///
/// @param Map { [] -> ValInst[] }
static bool isMapToUnknown(const isl::map &Map) {
  isl::space Space = Map.get_space().range();
  return Space.has_tuple_id(isl::dim::set).is_false() &&
         Space.is_wrapping().is_false() && Space.dim(isl::dim::set) == 0;
}

isl::union_map polly::filterKnownValInst(const isl::union_map &UMap) {
  isl::union_map Result = isl::union_map::empty(UMap.get_space());
  isl::stat Success = UMap.foreach_map([=, &Result](isl::map Map) -> isl::stat {
    if (!isMapToUnknown(Map))
      Result = Result.add_map(Map);
    return isl::stat::ok;
  });
  if (Success != isl::stat::ok)
    return {};
  return Result;
}

ZoneAlgorithm::ZoneAlgorithm(const char *PassName, Scop *S, LoopInfo *LI)
    : PassName(PassName), IslCtx(S->getSharedIslCtx()), S(S), LI(LI),
      Schedule(S->getSchedule()) {
  auto Domains = S->getDomains();

  Schedule = Schedule.intersect_domain(Domains);
  ParamSpace = Schedule.get_space();
  ScatterSpace = getScatterSpace(Schedule);
}

/// Check if all stores in @p Stmt store the very same value.
///
/// This covers a special situation occurring in Polybench's
/// covariance/correlation (which is typical for algorithms that cover symmetric
/// matrices):
///
///   for (int i = 0; i < n; i += 1)
///     for (int j = 0; j <= i; j += 1) {
///       double x = ...;
///       C[i][j] = x;
///       C[j][i] = x;
///     }
///
/// For i == j, the same value is written twice to the same element. Double
/// writes to the same element are not allowed in DeLICM because its algorithm
/// does not see which of the writes is effective. But if it is the same value
/// anyway, it doesn't matter.
///
/// LLVM passes, however, cannot simplify this because the write is necessary
/// for i != j (unless it would add a condition for one of the writes to occur
/// only if i != j).
///
/// TODO: In the future we may want to extend this to make the checks
/// specific to different memory locations.
static bool onlySameValueWrites(ScopStmt *Stmt) {
  Value *V = nullptr;

  for (auto *MA : *Stmt) {
    if (!MA->isLatestArrayKind() || !MA->isMustWrite() ||
        !MA->isOriginalArrayKind())
      continue;

    if (!V) {
      V = MA->getAccessValue();
      continue;
    }

    if (V != MA->getAccessValue())
      return false;
  }
  return true;
}

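/// Check a statement for accesses that would prevent zone analysis and add the
/// affected arrays to @p IncompatibleElts.
///
/// Within a single statement instance, a load after a store to the same
/// element, a store inside a non-affine subregion, and two stores to the same
/// element (unless all stores write the same value) cannot be ordered by the
/// polyhedral model, so the entire accessed arrays are marked incompatible.
///
/// @param Stmt             The statement to check.
/// @param IncompatibleElts Collects the elements not usable for zone analysis.
/// @param AllElts          Collects all elements of the arrays accessed by
///                         @p Stmt.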
void ZoneAlgorithm::collectIncompatibleElts(ScopStmt *Stmt,
                                            isl::union_set &IncompatibleElts,
                                            isl::union_set &AllElts) {
  auto Stores = makeEmptyUnionMap();
  auto Loads = makeEmptyUnionMap();

  // This assumes that the MemoryKind::Array MemoryAccesses are iterated in
  // order.
  for (auto *MA : *Stmt) {
    if (!MA->isOriginalArrayKind())
      continue;

    isl::map AccRelMap = getAccessRelationFor(MA);
    isl::union_map AccRel = AccRelMap;

    // To avoid solving any ILP problems, always add entire arrays instead of
    // just the elements that are accessed.
    auto ArrayElts = isl::set::universe(AccRelMap.get_space().range());
    AllElts = AllElts.add_set(ArrayElts);

    if (MA->isRead()) {
      // Reject load after store to same location.
      if (!Stores.is_disjoint(AccRel)) {
        LLVM_DEBUG(
            dbgs() << "Load after store of same element in same statement\n");
        OptimizationRemarkMissed R(PassName, "LoadAfterStore",
                                   MA->getAccessInstruction());
        R << "load after store of same element in same statement";
        R << " (previous stores: " << Stores;
        R << ", loading: " << AccRel << ")";
        S->getFunction().getContext().diagnose(R);

        IncompatibleElts = IncompatibleElts.add_set(ArrayElts);
      }

      Loads = Loads.unite(AccRel);

      continue;
    }

    // In region statements the order is less clear, e.g. the load and store
    // might be in a boxed loop.
    if (Stmt->isRegionStmt() && !Loads.is_disjoint(AccRel)) {
      LLVM_DEBUG(dbgs() << "WRITE in non-affine subregion not supported\n");
      OptimizationRemarkMissed R(PassName, "StoreInSubregion",
                                 MA->getAccessInstruction());
      R << "store is in a non-affine subregion";
      S->getFunction().getContext().diagnose(R);

      IncompatibleElts = IncompatibleElts.add_set(ArrayElts);
    }

    // Do not allow more than one store to the same location.
    if (!Stores.is_disjoint(AccRel) && !onlySameValueWrites(Stmt)) {
      LLVM_DEBUG(dbgs() << "WRITE after WRITE to same element\n");
      OptimizationRemarkMissed R(PassName, "StoreAfterStore",
                                 MA->getAccessInstruction());
      R << "store after store of same element in same statement";
      R << " (previous stores: " << Stores;
      R << ", storing: " << AccRel << ")";
      S->getFunction().getContext().diagnose(R);

      IncompatibleElts = IncompatibleElts.add_set(ArrayElts);
    }

    Stores = Stores.unite(AccRel);
  }
}

void ZoneAlgorithm::addArrayReadAccess(MemoryAccess *MA) {
  assert(MA->isLatestArrayKind());
  assert(MA->isRead());
  ScopStmt *Stmt = MA->getStatement();

  // { DomainRead[] -> Element[] }
  auto AccRel = intersectRange(getAccessRelationFor(MA), CompatibleElts);
  AllReads = AllReads.add_map(AccRel);

  if (LoadInst *Load = dyn_cast_or_null<LoadInst>(MA->getAccessInstruction())) {
    // { DomainRead[] -> ValInst[] }
    isl::map LoadValInst = makeValInst(
        Load, Stmt, LI->getLoopFor(Load->getParent()), Stmt->isBlockStmt());

    // { DomainRead[] -> [Element[] -> DomainRead[]] }
    isl::map IncludeElement = AccRel.domain_map().curry();

    // { [Element[] -> DomainRead[]] -> ValInst[] }
    isl::map EltLoadValInst = LoadValInst.apply_domain(IncludeElement);

    AllReadValInst = AllReadValInst.add_map(EltLoadValInst);
  }
}

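/// Return the ValInst written by the access @p MA.
///
/// Only must-writes that store a value of the array's element type into a
/// single element, or a memset with a zero value, are represented; for
/// everything else a null union_map is returned and the caller falls back to
/// an unknown written value.
///
/// @param MA     The write access.
/// @param AccRel { Domain[] -> Element[] } The access relation of @p MA.
///
/// @return { Domain[] -> ValInst[] } or null.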
isl::union_map ZoneAlgorithm::getWrittenValue(MemoryAccess *MA,
                                              isl::map AccRel) {
  if (!MA->isMustWrite())
    return {};

  Value *AccVal = MA->getAccessValue();
  ScopStmt *Stmt = MA->getStatement();
  Instruction *AccInst = MA->getAccessInstruction();

  // Write a value to a single element.
  auto L = MA->isOriginalArrayKind() ? LI->getLoopFor(AccInst->getParent())
                                     : Stmt->getSurroundingLoop();
  if (AccVal &&
      AccVal->getType() == MA->getLatestScopArrayInfo()->getElementType() &&
      AccRel.is_single_valued().is_true())
    return makeNormalizedValInst(AccVal, Stmt, L);

  // memset(_, '0', ) is equivalent to writing the null value to all touched
  // elements. isMustWrite() ensures that all of an element's bytes are
  // overwritten.
  if (auto *Memset = dyn_cast<MemSetInst>(AccInst)) {
    auto *WrittenConstant = dyn_cast<Constant>(Memset->getValue());
    Type *Ty = MA->getLatestScopArrayInfo()->getElementType();
    if (WrittenConstant && WrittenConstant->isZeroValue()) {
      Constant *Zero = Constant::getNullValue(Ty);
      return makeNormalizedValInst(Zero, Stmt, L);
    }
  }

  return {};
}

void ZoneAlgorithm::addArrayWriteAccess(MemoryAccess *MA) {
  assert(MA->isLatestArrayKind());
  assert(MA->isWrite());
  auto *Stmt = MA->getStatement();

  // { Domain[] -> Element[] }
  isl::map AccRel = intersectRange(getAccessRelationFor(MA), CompatibleElts);

  if (MA->isMustWrite())
    AllMustWrites = AllMustWrites.add_map(AccRel);

  if (MA->isMayWrite())
    AllMayWrites = AllMayWrites.add_map(AccRel);

  // { Domain[] -> ValInst[] }
  isl::union_map WriteValInstance = getWrittenValue(MA, AccRel);
  if (!WriteValInstance)
    WriteValInstance = makeUnknownForDomain(Stmt);

  // { Domain[] -> [Element[] -> Domain[]] }
  isl::map IncludeElement = AccRel.domain_map().curry();

  // { [Element[] -> DomainWrite[]] -> ValInst[] }
  isl::union_map EltWriteValInst =
      WriteValInstance.apply_domain(IncludeElement);

  AllWriteValInst = AllWriteValInst.unite(EltWriteValInst);
}

/// Return whether @p PHI refers (also transitively through other PHIs) to
/// itself.
///
///   loop:
///     %phi1 = phi [0, %preheader], [%phi1, %loop]
///     br i1 %c, label %loop, label %exit
///
///   exit:
///     %phi2 = phi [%phi1, %bb]
///
/// In this example, %phi1 is recursive, but %phi2 is not.
static bool isRecursivePHI(const PHINode *PHI) {
  SmallVector<const PHINode *, 8> Worklist;
  SmallPtrSet<const PHINode *, 8> Visited;
  Worklist.push_back(PHI);

  while (!Worklist.empty()) {
    const PHINode *Cur = Worklist.pop_back_val();

    if (Visited.count(Cur))
      continue;
    Visited.insert(Cur);

    for (const Use &Incoming : Cur->incoming_values()) {
      Value *IncomingVal = Incoming.get();
      auto *IncomingPHI = dyn_cast<PHINode>(IncomingVal);
      if (!IncomingPHI)
        continue;

      if (IncomingPHI == PHI)
        return true;
      Worklist.push_back(IncomingPHI);
    }
  }
  return false;
}

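/// For each instance of the PHI's read, compute the instance of the incoming
/// block statement whose value arrives at that read, i.e. the last PHI write
/// scheduled before the read.
///
/// @param SAI The ScopArrayInfo representing the PHI's storage.
///
/// @return { DomainPHIRead[] -> DomainPHIWrite[] }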
isl::union_map ZoneAlgorithm::computePerPHI(const ScopArrayInfo *SAI) {
  // TODO: If the PHI has an incoming block from before the SCoP, it is not
  // represented in any ScopStmt.

  auto *PHI = cast<PHINode>(SAI->getBasePtr());
  auto It = PerPHIMaps.find(PHI);
  if (It != PerPHIMaps.end())
    return It->second;

  assert(SAI->isPHIKind());

  // { DomainPHIWrite[] -> Scatter[] }
  isl::union_map PHIWriteScatter = makeEmptyUnionMap();

  // Collect all incoming block timepoints.
  for (MemoryAccess *MA : S->getPHIIncomings(SAI)) {
    isl::map Scatter = getScatterFor(MA);
    PHIWriteScatter = PHIWriteScatter.add_map(Scatter);
  }

  // { DomainPHIRead[] -> Scatter[] }
  isl::map PHIReadScatter = getScatterFor(S->getPHIRead(SAI));

  // { DomainPHIRead[] -> Scatter[] }
  isl::map BeforeRead = beforeScatter(PHIReadScatter, true);

  // { Scatter[] }
  isl::set WriteTimes = singleton(PHIWriteScatter.range(), ScatterSpace);

  // { DomainPHIRead[] -> Scatter[] }
  isl::map PHIWriteTimes = BeforeRead.intersect_range(WriteTimes);
  isl::map LastPerPHIWrites = PHIWriteTimes.lexmax();

  // { DomainPHIRead[] -> DomainPHIWrite[] }
  isl::union_map Result =
      isl::union_map(LastPerPHIWrites).apply_range(PHIWriteScatter.reverse());
  assert(!Result.is_single_valued().is_false());
  assert(!Result.is_injective().is_false());

  PerPHIMaps.insert({PHI, Result});
  return Result;
}

isl::union_set ZoneAlgorithm::makeEmptyUnionSet() const {
  return isl::union_set::empty(ParamSpace);
}

isl::union_map ZoneAlgorithm::makeEmptyUnionMap() const {
  return isl::union_map::empty(ParamSpace);
}

void ZoneAlgorithm::collectCompatibleElts() {
  // First find all the incompatible elements, then take the complement.
  // We compile the list of compatible (rather than incompatible) elements so
  // users can intersect with the list, not requiring a subtract operation. It
  // also allows us to define a 'universe' of all elements and makes it more
  // explicit which array elements can be used.
  isl::union_set AllElts = makeEmptyUnionSet();
  isl::union_set IncompatibleElts = makeEmptyUnionSet();

  for (auto &Stmt : *S)
    collectIncompatibleElts(&Stmt, IncompatibleElts, AllElts);

  NumIncompatibleArrays += isl_union_set_n_set(IncompatibleElts.get());
  CompatibleElts = AllElts.subtract(IncompatibleElts);
  NumCompatibleArrays += isl_union_set_n_set(CompatibleElts.get());
}

isl::map ZoneAlgorithm::getScatterFor(ScopStmt *Stmt) const {
  isl::space ResultSpace =
      Stmt->getDomainSpace().map_from_domain_and_range(ScatterSpace);
  return Schedule.extract_map(ResultSpace);
}

isl::map ZoneAlgorithm::getScatterFor(MemoryAccess *MA) const {
  return getScatterFor(MA->getStatement());
}

isl::union_map ZoneAlgorithm::getScatterFor(isl::union_set Domain) const {
  return Schedule.intersect_domain(Domain);
}

isl::map ZoneAlgorithm::getScatterFor(isl::set Domain) const {
  auto ResultSpace = Domain.get_space().map_from_domain_and_range(ScatterSpace);
  auto UDomain = isl::union_set(Domain);
  auto UResult = getScatterFor(std::move(UDomain));
  auto Result = singleton(std::move(UResult), std::move(ResultSpace));
  assert(!Result || Result.domain().is_equal(Domain) == isl_bool_true);
  return Result;
}

isl::set ZoneAlgorithm::getDomainFor(ScopStmt *Stmt) const {
  return Stmt->getDomain().remove_redundancies();
}

isl::set ZoneAlgorithm::getDomainFor(MemoryAccess *MA) const {
  return getDomainFor(MA->getStatement());
}

isl::map ZoneAlgorithm::getAccessRelationFor(MemoryAccess *MA) const {
  auto Domain = getDomainFor(MA);
  auto AccRel = MA->getLatestAccessRelation();
  return AccRel.intersect_domain(Domain);
}

isl::map ZoneAlgorithm::getScalarReachingDefinition(ScopStmt *Stmt) {
  auto &Result = ScalarReachDefZone[Stmt];
  if (Result)
    return Result;

  auto Domain = getDomainFor(Stmt);
  Result = computeScalarReachingDefinition(Schedule, Domain, false, true);
  simplify(Result);

  return Result;
}

isl::map ZoneAlgorithm::getScalarReachingDefinition(isl::set DomainDef) {
  auto DomId = DomainDef.get_tuple_id();
  auto *Stmt = static_cast<ScopStmt *>(isl_id_get_user(DomId.get()));

  auto StmtResult = getScalarReachingDefinition(Stmt);

  return StmtResult.intersect_range(DomainDef);
}

isl::map ZoneAlgorithm::makeUnknownForDomain(ScopStmt *Stmt) const {
  return ::makeUnknownForDomain(getDomainFor(Stmt));
}

isl::id ZoneAlgorithm::makeValueId(Value *V) {
  if (!V)
    return nullptr;

  auto &Id = ValueIds[V];
  if (Id.is_null()) {
    auto Name = getIslCompatibleName("Val_", V, ValueIds.size() - 1,
                                     std::string(), UseInstructionNames);
    Id = isl::id::alloc(IslCtx.get(), Name.c_str(), V);
  }
  return Id;
}

isl::space ZoneAlgorithm::makeValueSpace(Value *V) {
  auto Result = ParamSpace.set_from_params();
  return Result.set_tuple_id(isl::dim::set, makeValueId(V));
}

isl::set ZoneAlgorithm::makeValueSet(Value *V) {
  auto Space = makeValueSpace(V);
  return isl::set::universe(Space);
}

isl::map ZoneAlgorithm::makeValInst(Value *Val, ScopStmt *UserStmt, Loop *Scope,
                                    bool IsCertain) {
  // If the definition/write is conditional, the value at the location could
  // be either the written value or the old value. Since we cannot know which
  // one, consider the value to be unknown.
  if (!IsCertain)
    return makeUnknownForDomain(UserStmt);

  auto DomainUse = getDomainFor(UserStmt);
  auto VUse = VirtualUse::create(S, UserStmt, Scope, Val, true);
  switch (VUse.getKind()) {
  case VirtualUse::Constant:
  case VirtualUse::Block:
  case VirtualUse::Hoisted:
  case VirtualUse::ReadOnly: {
    // The definition does not depend on the statement which uses it.
    auto ValSet = makeValueSet(Val);
    return isl::map::from_domain_and_range(DomainUse, ValSet);
  }

  case VirtualUse::Synthesizable: {
    auto *ScevExpr = VUse.getScevExpr();
    auto UseDomainSpace = DomainUse.get_space();

    // Construct the SCEV space.
    // TODO: Add only the induction variables referenced in SCEVAddRecExpr
    // expressions, not just all of them.
    auto ScevId = isl::manage(isl_id_alloc(
        UseDomainSpace.get_ctx().get(), nullptr, const_cast<SCEV *>(ScevExpr)));

    auto ScevSpace = UseDomainSpace.drop_dims(isl::dim::set, 0, 0);
    ScevSpace = ScevSpace.set_tuple_id(isl::dim::set, ScevId);

    // { DomainUse[] -> ScevExpr[] }
    auto ValInst =
        isl::map::identity(UseDomainSpace.map_from_domain_and_range(ScevSpace));
    return ValInst;
  }

  case VirtualUse::Intra: {
    // Definition and use are in the same statement. We do not need to compute
    // a reaching definition.

    // { llvm::Value }
    auto ValSet = makeValueSet(Val);

    // { UserDomain[] -> llvm::Value }
    auto ValInstSet = isl::map::from_domain_and_range(DomainUse, ValSet);

    // { UserDomain[] -> [UserDomain[] -> llvm::Value] }
    auto Result = ValInstSet.domain_map().reverse();
    simplify(Result);
    return Result;
  }

  case VirtualUse::Inter: {
    // The value is defined in a different statement.

    auto *Inst = cast<Instruction>(Val);
    auto *ValStmt = S->getStmtFor(Inst);

    // If the llvm::Value is defined in a removed Stmt, we cannot derive its
    // domain. We could use an arbitrary statement, but this could result in
    // different ValInst[] for the same llvm::Value.
    if (!ValStmt)
      return ::makeUnknownForDomain(DomainUse);

    // { DomainDef[] }
    auto DomainDef = getDomainFor(ValStmt);

    // { Scatter[] -> DomainDef[] }
    auto ReachDef = getScalarReachingDefinition(DomainDef);

    // { DomainUse[] -> Scatter[] }
    auto UserSched = getScatterFor(DomainUse);

    // { DomainUse[] -> DomainDef[] }
    auto UsedInstance = UserSched.apply_range(ReachDef);

    // { llvm::Value }
    auto ValSet = makeValueSet(Val);

    // { DomainUse[] -> llvm::Value[] }
    auto ValInstSet = isl::map::from_domain_and_range(DomainUse, ValSet);

    // { DomainUse[] -> [DomainDef[] -> llvm::Value] }
    auto Result = UsedInstance.range_product(ValInstSet);

    simplify(Result);
    return Result;
  }
  }
  llvm_unreachable("Unhandled use type");
}

/// Remove all computed PHIs out of @p Input and replace them by their incoming
/// value.
///
/// @param Input        { [] -> ValInst[] }
/// @param ComputedPHIs Set of PHIs that are replaced. Their ValInsts must
///                     appear on the LHS of @p NormalizeMap.
/// @param NormalizeMap { ValInst[] -> ValInst[] }
static isl::union_map normalizeValInst(isl::union_map Input,
                                       const DenseSet<PHINode *> &ComputedPHIs,
                                       isl::union_map NormalizeMap) {
  isl::union_map Result = isl::union_map::empty(Input.get_space());
  Input.foreach_map(
      [&Result, &ComputedPHIs, &NormalizeMap](isl::map Map) -> isl::stat {
        isl::space Space = Map.get_space();
        isl::space RangeSpace = Space.range();

        // Instructions within the SCoP are always wrapped. Non-wrapped tuples
        // are therefore invariant in the SCoP and don't need normalization.
        if (!RangeSpace.is_wrapping()) {
          Result = Result.add_map(Map);
          return isl::stat::ok;
        }

        auto *PHI = dyn_cast<PHINode>(static_cast<Value *>(
            RangeSpace.unwrap().get_tuple_id(isl::dim::out).get_user()));

        // If no normalization is necessary, then the ValInst stands for itself.
        if (!ComputedPHIs.count(PHI)) {
          Result = Result.add_map(Map);
          return isl::stat::ok;
        }

        // Otherwise, apply the normalization.
        isl::union_map Mapped = isl::union_map(Map).apply_range(NormalizeMap);
        Result = Result.unite(Mapped);
        NumPHINormalization++;
        return isl::stat::ok;
      });
  return Result;
}

isl::union_map ZoneAlgorithm::makeNormalizedValInst(llvm::Value *Val,
                                                    ScopStmt *UserStmt,
                                                    llvm::Loop *Scope,
                                                    bool IsCertain) {
  isl::map ValInst = makeValInst(Val, UserStmt, Scope, IsCertain);
  isl::union_map Normalized =
      normalizeValInst(ValInst, ComputedPHIs, NormalizeMap);
  return Normalized;
}

bool ZoneAlgorithm::isCompatibleAccess(MemoryAccess *MA) {
  if (!MA)
    return false;
  if (!MA->isLatestArrayKind())
    return false;
  Instruction *AccInst = MA->getAccessInstruction();
  return isa<StoreInst>(AccInst) || isa<LoadInst>(AccInst);
}

bool ZoneAlgorithm::isNormalizable(MemoryAccess *MA) {
  assert(MA->isRead());

  // Exclude ExitPHIs; we are assuming that a normalizable PHI has a READ
  // MemoryAccess.
  if (!MA->isOriginalPHIKind())
    return false;

  // Exclude recursive PHIs; normalizing them would require a transitive
  // closure.
  auto *PHI = cast<PHINode>(MA->getAccessInstruction());
  if (RecursivePHIs.count(PHI))
    return false;

  // Ensure that each incoming value can be represented by a ValInst[].
  // We do represent values from statements associated with multiple incoming
  // values by the PHI itself, but we do not handle this case yet when
  // normalizing (especially in isNormalized()).
  const ScopArrayInfo *SAI = MA->getOriginalScopArrayInfo();
  auto Incomings = S->getPHIIncomings(SAI);
  for (MemoryAccess *Incoming : Incomings) {
    if (Incoming->getIncoming().size() != 1)
      return false;
  }

  return true;
}

isl::boolean ZoneAlgorithm::isNormalized(isl::map Map) {
  isl::space Space = Map.get_space();
  isl::space RangeSpace = Space.range();

  isl::boolean IsWrapping = RangeSpace.is_wrapping();
  if (!IsWrapping.is_true())
    return !IsWrapping;
  isl::space Unwrapped = RangeSpace.unwrap();

  isl::id OutTupleId = Unwrapped.get_tuple_id(isl::dim::out);
  if (OutTupleId.is_null())
    return isl::boolean();
  auto *PHI = dyn_cast<PHINode>(static_cast<Value *>(OutTupleId.get_user()));
  if (!PHI)
    return true;

  isl::id InTupleId = Unwrapped.get_tuple_id(isl::dim::in);
  if (InTupleId.is_null())
    return isl::boolean();
  auto *IncomingStmt = static_cast<ScopStmt *>(InTupleId.get_user());
  MemoryAccess *PHIRead = IncomingStmt->lookupPHIReadOf(PHI);
  if (!isNormalizable(PHIRead))
    return true;

  return false;
}

isl::boolean ZoneAlgorithm::isNormalized(isl::union_map UMap) {
  isl::boolean Result = true;
  UMap.foreach_map([this, &Result](isl::map Map) -> isl::stat {
    Result = isNormalized(Map);
    if (Result.is_true())
      return isl::stat::ok;
    return isl::stat::error;
  });
  return Result;
}

void ZoneAlgorithm::computeCommon() {
  AllReads = makeEmptyUnionMap();
  AllMayWrites = makeEmptyUnionMap();
  AllMustWrites = makeEmptyUnionMap();
  AllWriteValInst = makeEmptyUnionMap();
  AllReadValInst = makeEmptyUnionMap();

  // Default to empty, i.e. no normalization/replacement is taking place. Call
  // computeNormalizedPHIs() to initialize.
  NormalizeMap = makeEmptyUnionMap();
  ComputedPHIs.clear();

  for (auto &Stmt : *S) {
    for (auto *MA : Stmt) {
      if (!MA->isLatestArrayKind())
        continue;

      if (MA->isRead())
        addArrayReadAccess(MA);

      if (MA->isWrite())
        addArrayWriteAccess(MA);
    }
  }

  // { DomainWrite[] -> Element[] }
  AllWrites = AllMustWrites.unite(AllMayWrites);

  // { [Element[] -> Zone[]] -> DomainWrite[] }
  WriteReachDefZone =
      computeReachingDefinition(Schedule, AllWrites, false, true);
  simplify(WriteReachDefZone);
}

void ZoneAlgorithm::computeNormalizedPHIs() {
  // Determine which PHIs can reference themselves. They are excluded from
  // normalization to avoid problems with transitive closures.
  for (ScopStmt &Stmt : *S) {
    for (MemoryAccess *MA : Stmt) {
      if (!MA->isPHIKind())
        continue;
      if (!MA->isRead())
        continue;

      // TODO: Can be more efficient since isRecursivePHI can theoretically
      // determine recursiveness for multiple values and/or cache results.
      auto *PHI = cast<PHINode>(MA->getAccessInstruction());
      if (isRecursivePHI(PHI)) {
        NumRecursivePHIs++;
        RecursivePHIs.insert(PHI);
      }
    }
  }

  // { PHIValInst[] -> IncomingValInst[] }
  isl::union_map AllPHIMaps = makeEmptyUnionMap();

  // Discover new PHIs and try to normalize them.
  DenseSet<PHINode *> AllPHIs;
  for (ScopStmt &Stmt : *S) {
    for (MemoryAccess *MA : Stmt) {
      if (!MA->isOriginalPHIKind())
        continue;
      if (!MA->isRead())
        continue;
      if (!isNormalizable(MA))
        continue;

      auto *PHI = cast<PHINode>(MA->getAccessInstruction());
      const ScopArrayInfo *SAI = MA->getOriginalScopArrayInfo();

      // { PHIDomain[] -> PHIValInst[] }
      isl::map PHIValInst = makeValInst(PHI, &Stmt, Stmt.getSurroundingLoop());

      // { IncomingDomain[] -> IncomingValInst[] }
      isl::union_map IncomingValInsts = makeEmptyUnionMap();

      // Get all incoming values.
      for (MemoryAccess *MA : S->getPHIIncomings(SAI)) {
        ScopStmt *IncomingStmt = MA->getStatement();

        auto Incoming = MA->getIncoming();
        assert(Incoming.size() == 1 && "The incoming value must be "
                                       "representable by something else than "
                                       "the PHI itself");
        Value *IncomingVal = Incoming[0].second;

        // { IncomingDomain[] -> IncomingValInst[] }
        isl::map IncomingValInst = makeValInst(
            IncomingVal, IncomingStmt, IncomingStmt->getSurroundingLoop());

        IncomingValInsts = IncomingValInsts.add_map(IncomingValInst);
      }

      // Determine which instance of the PHI statement corresponds to which
      // incoming value.
      // { PHIDomain[] -> IncomingDomain[] }
      isl::union_map PerPHI = computePerPHI(SAI);

      // { PHIValInst[] -> IncomingValInst[] }
      isl::union_map PHIMap =
          PerPHI.apply_domain(PHIValInst).apply_range(IncomingValInsts);
      assert(!PHIMap.is_single_valued().is_false());

      // Resolve transitiveness: The incoming value of the newly discovered PHI
      // may reference a previously normalized PHI. At the same time, already
      // normalized PHIs might be normalized to the new PHI. At the end, none of
      // the PHIs may appear on the right-hand-side of the normalization map.
      PHIMap = normalizeValInst(PHIMap, AllPHIs, AllPHIMaps);
      AllPHIs.insert(PHI);
      AllPHIMaps = normalizeValInst(AllPHIMaps, AllPHIs, PHIMap);

      AllPHIMaps = AllPHIMaps.unite(PHIMap);
      NumNormalizablePHIs++;
    }
  }
  simplify(AllPHIMaps);

  // Apply the normalization.
  ComputedPHIs = AllPHIs;
  NormalizeMap = AllPHIMaps;

  assert(!NormalizeMap || isNormalized(NormalizeMap));
}

void ZoneAlgorithm::printAccesses(llvm::raw_ostream &OS, int Indent) const {
  OS.indent(Indent) << "After accesses {\n";
  for (auto &Stmt : *S) {
    OS.indent(Indent + 4) << Stmt.getBaseName() << "\n";
    for (auto *MA : Stmt)
      MA->print(OS);
  }
  OS.indent(Indent) << "}\n";
}

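/// Compute the array content that is known because of must-writes.
///
/// Within the zone in which a write with a representable written value is the
/// reaching definition, the array element is known to contain exactly that
/// ValInst.
///
/// @return { [Element[] -> Zone[]] -> ValInst[] }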
isl::union_map ZoneAlgorithm::computeKnownFromMustWrites() const {
  // { [Element[] -> Zone[]] -> [Element[] -> DomainWrite[]] }
  isl::union_map EltReachdDef = distributeDomain(WriteReachDefZone.curry());

  // { [Element[] -> DomainWrite[]] -> ValInst[] }
  isl::union_map AllKnownWriteValInst = filterKnownValInst(AllWriteValInst);

  // { [Element[] -> Zone[]] -> ValInst[] }
  return EltReachdDef.apply_range(AllKnownWriteValInst);
}

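/// Compute the array content that is known because of loads.
///
/// If an element is read, it must already contain the loaded ValInst
/// throughout the zone that shares the read's reaching definition, since the
/// content cannot change between its definition and the next overwrite.
///
/// @return { [Element[] -> Zone[]] -> ValInst[] }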
isl::union_map ZoneAlgorithm::computeKnownFromLoad() const {
  // { Element[] }
  isl::union_set AllAccessedElts = AllReads.range().unite(AllWrites.range());

  // { Element[] -> Scatter[] }
  isl::union_map EltZoneUniverse = isl::union_map::from_domain_and_range(
      AllAccessedElts, isl::set::universe(ScatterSpace));

  // This assumes there are no "holes" in
  // isl_union_map_domain(WriteReachDefZone); alternatively, compute the zones
  // before the first write or the zones that are not written at all.
  // { Element[] -> Scatter[] }
  isl::union_set NonReachDef =
      EltZoneUniverse.wrap().subtract(WriteReachDefZone.domain());

  // { [Element[] -> Zone[]] -> ReachDefId[] }
  isl::union_map DefZone =
      WriteReachDefZone.unite(isl::union_map::from_domain(NonReachDef));

  // { [Element[] -> Scatter[]] -> Element[] }
  isl::union_map EltZoneElt = EltZoneUniverse.domain_map();

  // { [Element[] -> Zone[]] -> [Element[] -> ReachDefId[]] }
  isl::union_map DefZoneEltDefId = EltZoneElt.range_product(DefZone);

  // { Element[] -> [Zone[] -> ReachDefId[]] }
  isl::union_map EltDefZone = DefZone.curry();

  // { [Element[] -> Zone[]] -> [Element[] -> ReachDefId[]] }
  isl::union_map EltZoneEltDefid = distributeDomain(EltDefZone);

  // { [Element[] -> Scatter[]] -> DomainRead[] }
  isl::union_map Reads = AllReads.range_product(Schedule).reverse();

  // { [Element[] -> Scatter[]] -> [Element[] -> DomainRead[]] }
  isl::union_map ReadsElt = EltZoneElt.range_product(Reads);

  // { [Element[] -> Scatter[]] -> ValInst[] }
  isl::union_map ScatterKnown = ReadsElt.apply_range(AllReadValInst);

  // { [Element[] -> ReachDefId[]] -> ValInst[] }
  isl::union_map DefidKnown =
      DefZoneEltDefId.apply_domain(ScatterKnown).reverse();

  // { [Element[] -> Zone[]] -> ValInst[] }
  return DefZoneEltDefId.apply_range(DefidKnown);
}

isl::union_map ZoneAlgorithm::computeKnown(bool FromWrite,
                                           bool FromRead) const {
  isl::union_map Result = makeEmptyUnionMap();

  if (FromWrite)
    Result = Result.unite(computeKnownFromMustWrites());

  if (FromRead)
    Result = Result.unite(computeKnownFromLoad());

  simplify(Result);
  return Result;
}