//== ArrayBoundCheckerV2.cpp ------------------------------------*- C++ -*--==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ArrayBoundCheckerV2, a path-sensitive checker that
// looks for out-of-bound array element accesses.
//
//===----------------------------------------------------------------------===//

#include "ClangSACheckers.h"
#include "clang/StaticAnalyzer/Core/CheckerV2.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/AST/CharUnits.h"

using namespace clang;
using namespace ento;

namespace {
class ArrayBoundCheckerV2 :
    public CheckerV2<check::Location> {
  mutable llvm::OwningPtr<BuiltinBug> BT;

  enum OOB_Kind { OOB_Precedes, OOB_Exceeds };

  void reportOOB(CheckerContext &C, const GRState *errorState,
                 OOB_Kind kind) const;

public:
  void checkLocation(SVal l, bool isLoad, CheckerContext &C) const;
};

// FIXME: Eventually replace RegionRawOffset with this class.
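// RegionRawOffsetV2 pairs a base memory region with a symbolic byte offset
// into that region; the bounds check below compares this offset against 0
// and against the extent of the base region.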
class RegionRawOffsetV2 {
private:
  const SubRegion *baseRegion;
  SVal byteOffset;

  RegionRawOffsetV2()
    : baseRegion(0), byteOffset(UnknownVal()) {}

public:
  RegionRawOffsetV2(const SubRegion* base, SVal offset)
    : baseRegion(base), byteOffset(offset) {}

  NonLoc getByteOffset() const { return cast<NonLoc>(byteOffset); }
  const SubRegion *getRegion() const { return baseRegion; }

  static RegionRawOffsetV2 computeOffset(const GRState *state,
                                         SValBuilder &svalBuilder,
                                         SVal location);

  void dump() const;
  void dumpToStream(llvm::raw_ostream& os) const;
};
}

void ArrayBoundCheckerV2::checkLocation(SVal location, bool isLoad,
                                        CheckerContext &checkerContext) const {

  // NOTE: Instead of using GRState::assumeInBound(), we are prototyping
  // some new logic here that reasons directly about memory region extents.
  // Once that logic is more mature, we can bring it back to assumeInBound()
  // for all clients to use.
  //
  // The algorithm we are using here for bounds checking is to see if the
  // memory access is within the extent of the base region. Since we
  // have some flexibility in defining the base region, we can achieve
  // various levels of conservatism in our buffer overflow checking.
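  //
  // For example, for an access 'buf[i]' where 'buf' is declared as
  // 'char buf[10]', the access is reduced to (base region of 'buf',
  // byte offset i), and an error is reported when the current state
  // implies the offset lies before offset 0 or at/after the 10-byte
  // extent of 'buf'.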
  const GRState *state = checkerContext.getState();
  const GRState *originalState = state;

  SValBuilder &svalBuilder = checkerContext.getSValBuilder();
  const RegionRawOffsetV2 &rawOffset =
    RegionRawOffsetV2::computeOffset(state, svalBuilder, location);

  if (!rawOffset.getRegion())
    return;

  // CHECK LOWER BOUND: Is byteOffset < 0? If so, we are doing a load/store
  // before the first valid offset in the memory region.

  SVal lowerBound
    = svalBuilder.evalBinOpNN(state, BO_LT, rawOffset.getByteOffset(),
                              svalBuilder.makeZeroArrayIndex(),
                              svalBuilder.getConditionType());

  NonLoc *lowerBoundToCheck = dyn_cast<NonLoc>(&lowerBound);
  if (!lowerBoundToCheck)
    return;

  const GRState *state_precedesLowerBound, *state_withinLowerBound;
  llvm::tie(state_precedesLowerBound, state_withinLowerBound) =
    state->assume(*lowerBoundToCheck);

  // Are we constrained enough to definitely precede the lower bound?
  if (state_precedesLowerBound && !state_withinLowerBound) {
    reportOOB(checkerContext, state_precedesLowerBound, OOB_Precedes);
    return;
  }

  // Otherwise, assume the constraint of the lower bound.
  assert(state_withinLowerBound);
  state = state_withinLowerBound;

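  // The 'do { ... } while (false)' below lets the upper-bound check 'break'
  // out early (e.g., when the region's extent is not a concrete NonLoc) and
  // still fall through to the state transition at the end of this function.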
  do {
    // CHECK UPPER BOUND: Is byteOffset >= extent(baseRegion)? If so,
    // we are doing a load/store after the last valid offset.
    DefinedOrUnknownSVal extentVal =
      rawOffset.getRegion()->getExtent(svalBuilder);
    if (!isa<NonLoc>(extentVal))
      break;

    SVal upperbound
      = svalBuilder.evalBinOpNN(state, BO_GE, rawOffset.getByteOffset(),
                                cast<NonLoc>(extentVal),
                                svalBuilder.getConditionType());

    NonLoc *upperboundToCheck = dyn_cast<NonLoc>(&upperbound);
    if (!upperboundToCheck)
      break;

    const GRState *state_exceedsUpperBound, *state_withinUpperBound;
    llvm::tie(state_exceedsUpperBound, state_withinUpperBound) =
      state->assume(*upperboundToCheck);

    // Are we constrained enough to definitely exceed the upper bound?
    if (state_exceedsUpperBound && !state_withinUpperBound) {
      reportOOB(checkerContext, state_exceedsUpperBound, OOB_Exceeds);
      return;
    }

    assert(state_withinUpperBound);
    state = state_withinUpperBound;
  }
  while (false);

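  // If we assumed new constraints (in-bounds on either end), add a transition
  // so subsequent checks on this path see the refined state.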
  if (state != originalState)
    checkerContext.generateNode(state);
}

void ArrayBoundCheckerV2::reportOOB(CheckerContext &checkerContext,
                                    const GRState *errorState,
                                    OOB_Kind kind) const {

  ExplodedNode *errorNode = checkerContext.generateSink(errorState);
  if (!errorNode)
    return;

  if (!BT)
    BT.reset(new BuiltinBug("Out-of-bound access"));

  // FIXME: These diagnostics are preliminary. We should get far better
  // diagnostics for explaining buffer overruns.

  llvm::SmallString<256> buf;
  llvm::raw_svector_ostream os(buf);
  os << "Out of bound memory access "
     << (kind == OOB_Precedes ? "(accessed memory precedes memory block)"
                              : "(access exceeds upper limit of memory block)");

  checkerContext.EmitReport(new RangedBugReport(*BT, os.str(), errorNode));
}

void RegionRawOffsetV2::dump() const {
  dumpToStream(llvm::errs());
}

void RegionRawOffsetV2::dumpToStream(llvm::raw_ostream& os) const {
  os << "raw_offset_v2{" << getRegion() << ',' << getByteOffset() << '}';
}

// FIXME: Merge with the implementation of the same method in Store.cpp
static bool IsCompleteType(ASTContext &Ctx, QualType Ty) {
  if (const RecordType *RT = Ty->getAs<RecordType>()) {
    const RecordDecl *D = RT->getDecl();
    if (!D->getDefinition())
      return false;
  }

  return true;
}


// Lazily computes a value to be used by 'computeOffset'. If 'val' is
// undefined (i.e., the offset has not been seeded yet), substitute '0'.
// Otherwise, return 'val'.
static inline SVal getValue(SVal val, SValBuilder &svalBuilder) {
  return isa<UndefinedVal>(val) ? svalBuilder.makeArrayIndex(0) : val;
}

// Scale a base value by a scaling factor, and return the scaled
// value as an SVal. Used by 'computeOffset'.
static inline SVal scaleValue(const GRState *state,
                              NonLoc baseVal, CharUnits scaling,
                              SValBuilder &sb) {
  return sb.evalBinOpNN(state, BO_Mul, baseVal,
                        sb.makeArrayIndex(scaling.getQuantity()),
                        sb.getArrayIndexType());
}

// Add an SVal to another, treating unknown and undefined values as
// summing to UnknownVal. Used by 'computeOffset'.
static SVal addValue(const GRState *state, SVal x, SVal y,
                     SValBuilder &svalBuilder) {
  // We treat UnknownVals and UndefinedVals the same here because we
  // only care about computing offsets.
  if (x.isUnknownOrUndef() || y.isUnknownOrUndef())
    return UnknownVal();

  return svalBuilder.evalBinOpNN(state, BO_Add,
                                 cast<NonLoc>(x), cast<NonLoc>(y),
                                 svalBuilder.getArrayIndexType());
}

/// Compute a raw byte offset from a base region. Used for array bounds
/// checking.
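///
/// The offset is accumulated by walking the chain of ElementRegions toward
/// the base region: each level contributes index * sizeof(element type).
/// For example, for 'p[i][j]' with 'int (*p)[4]', the raw byte offset
/// relative to the region 'p' points to is i * 16 + j * 4 (assuming a
/// 4-byte 'int').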
RegionRawOffsetV2 RegionRawOffsetV2::computeOffset(const GRState *state,
                                                   SValBuilder &svalBuilder,
                                                   SVal location)
{
  const MemRegion *region = location.getAsRegion();
  SVal offset = UndefinedVal();

  while (region) {
    switch (region->getKind()) {
      default: {
        if (const SubRegion *subReg = dyn_cast<SubRegion>(region))
          if (!offset.isUnknownOrUndef())
            return RegionRawOffsetV2(subReg, offset);
        return RegionRawOffsetV2();
      }
      case MemRegion::ElementRegionKind: {
        const ElementRegion *elemReg = cast<ElementRegion>(region);
        SVal index = elemReg->getIndex();
        if (!isa<NonLoc>(index))
          return RegionRawOffsetV2();
        QualType elemType = elemReg->getElementType();
        // If the element is an incomplete type, go no further.
        ASTContext &astContext = svalBuilder.getContext();
        if (!IsCompleteType(astContext, elemType))
          return RegionRawOffsetV2();

        // Update the offset.
        offset = addValue(state,
                          getValue(offset, svalBuilder),
                          scaleValue(state,
                                     cast<NonLoc>(index),
                                     astContext.getTypeSizeInChars(elemType),
                                     svalBuilder),
                          svalBuilder);

        if (offset.isUnknownOrUndef())
          return RegionRawOffsetV2();

        region = elemReg->getSuperRegion();
        continue;
      }
    }
  }
  return RegionRawOffsetV2();
}


void ento::registerArrayBoundCheckerV2(CheckerManager &mgr) {
  mgr.registerChecker<ArrayBoundCheckerV2>();
}