//===- HWAddressSanitizer.cpp - detector of memory access bugs -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file is a part of HWAddressSanitizer, an address sanity checker
/// based on tagged addressing.
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
#include <sstream>

using namespace llvm;

#define DEBUG_TYPE "hwasan"

static const char *const kHwasanModuleCtorName = "hwasan.module_ctor";
static const char *const kHwasanInitName = "__hwasan_init";

static const char *const kHwasanShadowMemoryDynamicAddress =
    "__hwasan_shadow_memory_dynamic_address";

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;
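// The access-size index is log2 of the access size in bytes: a 1-byte access
// uses index 0, a 4-byte access index 2, and a 16-byte access index 4.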

static const size_t kDefaultShadowScale = 4;
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();
static const unsigned kPointerTagShift = 56;
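// The tag lives in the top byte of the pointer (bits 56..63); AArch64's
// top-byte-ignore (TBI) feature lets such tagged pointers be dereferenced
// without stripping the tag first.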

static const unsigned kShadowBaseAlignment = 32;
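// kShadowBaseAlignment is a log2 value: emitPrologue() below derives the
// shadow base by rounding the per-thread ring buffer pointer up to a 2^32
// boundary.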

static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "hwasan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__hwasan_"));

static cl::opt<bool>
    ClInstrumentWithCalls("hwasan-instrument-with-calls",
                          cl::desc("instrument reads and writes with callbacks"),
                          cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentWrites(
    "hwasan-instrument-writes", cl::desc("instrument write instructions"),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "hwasan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClRecover(
    "hwasan-recover",
    cl::desc("Enable recovery mode (continue-after-error)."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
                                       cl::desc("instrument stack (allocas)"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClUARRetagToZero(
    "hwasan-uar-retag-to-zero",
    cl::desc("Clear alloca tags before returning from the function to allow "
             "mixing instrumented and non-instrumented function calls. When "
             "set to false, allocas are retagged before returning from the "
             "function to detect use after return."),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClGenerateTagsWithCalls(
    "hwasan-generate-tags-with-calls",
    cl::desc("generate new tags with runtime library calls"), cl::Hidden,
    cl::init(false));

static cl::opt<int> ClMatchAllTag(
    "hwasan-match-all-tag",
    cl::desc("don't report bad accesses via pointers with this tag"),
    cl::Hidden, cl::init(-1));

static cl::opt<bool> ClEnableKhwasan(
    "hwasan-kernel",
    cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
    cl::Hidden, cl::init(false));

// These flags allow changing the shadow mapping and control how shadow memory
// is accessed. The shadow mapping looks like:
//   Shadow = (Mem >> scale) + offset
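// For example, with the default scale of 4 each shadow byte covers a 16-byte
// granule of memory, so Mem = 0x40000010 maps to shadow byte offset + 0x4000001.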

static cl::opt<unsigned long long> ClMappingOffset(
    "hwasan-mapping-offset",
    cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"), cl::Hidden,
    cl::init(0));

static cl::opt<bool>
    ClWithIfunc("hwasan-with-ifunc",
                cl::desc("Access dynamic shadow through an ifunc global on "
                         "platforms that support this"),
                cl::Hidden, cl::init(false));

static cl::opt<bool> ClWithTls(
    "hwasan-with-tls",
    cl::desc("Access dynamic shadow through a thread-local pointer on "
             "platforms that support this"),
    cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClRecordStackHistory("hwasan-record-stack-history",
                         cl::desc("Record stack frames with tagged allocations "
                                  "in a thread-local ring buffer"),
                         cl::Hidden, cl::init(true));
static cl::opt<bool>
    ClCreateFrameDescriptions("hwasan-create-frame-descriptions",
                              cl::desc("create static frame descriptions"),
                              cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClInstrumentMemIntrinsics("hwasan-instrument-mem-intrinsics",
                              cl::desc("instrument memory intrinsics"),
                              cl::Hidden, cl::init(true));

static cl::opt<bool> ClInlineAllChecks("hwasan-inline-all-checks",
                                       cl::desc("inline all checks"),
                                       cl::Hidden, cl::init(false));

static cl::opt<bool> ClAllowIfunc("hwasan-allow-ifunc",
                                  cl::desc("allow the use of ifunc"),
                                  cl::Hidden, cl::init(false));

namespace {

/// An instrumentation pass implementing detection of addressability bugs
/// using tagged pointers.
class HWAddressSanitizer : public FunctionPass {
public:
  // Pass identification, replacement for typeid.
  static char ID;

  explicit HWAddressSanitizer(bool CompileKernel = false, bool Recover = false)
      : FunctionPass(ID) {
    this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
    this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0 ?
        ClEnableKhwasan : CompileKernel;
  }

  StringRef getPassName() const override { return "HWAddressSanitizer"; }

  bool runOnFunction(Function &F) override;
  bool doInitialization(Module &M) override;

  void initializeCallbacks(Module &M);

  Value *getDynamicShadowIfunc(IRBuilder<> &IRB);
  Value *getDynamicShadowNonTls(IRBuilder<> &IRB);

  void untagPointerOperand(Instruction *I, Value *Addr);
  Value *shadowBase();
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  void instrumentMemAccessInline(Value *Ptr, bool IsWrite,
                                 unsigned AccessSizeIndex,
                                 Instruction *InsertBefore);
  void instrumentMemIntrinsic(MemIntrinsic *MI);
  bool instrumentMemAccess(Instruction *I);
  Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
                                   uint64_t *TypeSize, unsigned *Alignment,
                                   Value **MaybeMask);

  bool isInterestingAlloca(const AllocaInst &AI);
  bool tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag);
  Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
  Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
  bool instrumentStack(SmallVectorImpl<AllocaInst *> &Allocas,
                       SmallVectorImpl<Instruction *> &RetVec, Value *StackTag);
  Value *getNextTagWithCall(IRBuilder<> &IRB);
  Value *getStackBaseTag(IRBuilder<> &IRB);
  Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
                      unsigned AllocaNo);
  Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);

  Value *getHwasanThreadSlotPtr(IRBuilder<> &IRB, Type *Ty);
  Value *emitPrologue(IRBuilder<> &IRB, bool WithFrameRecord);

private:
  LLVMContext *C;
  std::string CurModuleUniqueId;
  Triple TargetTriple;
  FunctionCallee HWAsanMemmove, HWAsanMemcpy, HWAsanMemset;

  // Frame description is a way to pass names/sizes of local variables
  // to the run-time w/o adding extra executable code in every function.
  // We do this by creating a separate section with {PC,Descr} pairs and passing
  // the section beg/end to __hwasan_init_frames() at module init time.
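  // For example, a frame holding "int x[4]" and "char buf[8]" would be
  // described by the string "16 x; 8 buf; " (see createFrameString).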
  std::string createFrameString(ArrayRef<AllocaInst*> Allocas);
  void createFrameGlobal(Function &F, const std::string &FrameString);
  // Get the section name for frame descriptions. Currently ELF-only.
  const char *getFrameSection() { return "__hwasan_frames"; }
  const char *getFrameSectionBeg() { return "__start___hwasan_frames"; }
  const char *getFrameSectionEnd() { return "__stop___hwasan_frames"; }
  GlobalVariable *createFrameSectionBound(Module &M, Type *Ty,
                                          const char *Name) {
    auto GV = new GlobalVariable(M, Ty, false, GlobalVariable::ExternalLinkage,
                                 nullptr, Name);
    GV->setVisibility(GlobalValue::HiddenVisibility);
    return GV;
  }

  /// This struct defines the shadow mapping using the rule:
  ///   shadow = (mem >> Scale) + Offset.
  /// If InGlobal is true, then
  ///   extern char __hwasan_shadow[];
  ///   shadow = (mem >> Scale) + &__hwasan_shadow
  /// If InTls is true, then
  ///   extern char *__hwasan_tls;
  ///   shadow = (mem >> Scale) + align_up(__hwasan_tls, 2^kShadowBaseAlignment)
  struct ShadowMapping {
    int Scale;
    uint64_t Offset;
    bool InGlobal;
    bool InTls;

    void init(Triple &TargetTriple);
    unsigned getAllocaAlignment() const { return 1U << Scale; }
  };
  ShadowMapping Mapping;

  Type *IntptrTy;
  Type *Int8PtrTy;
  Type *Int8Ty;
  Type *Int32Ty;

  bool CompileKernel;
  bool Recover;

  Function *HwasanCtorFunction;

  FunctionCallee HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
  FunctionCallee HwasanMemoryAccessCallbackSized[2];

  FunctionCallee HwasanTagMemoryFunc;
  FunctionCallee HwasanGenerateTagFunc;
  FunctionCallee HwasanThreadEnterFunc;

  Constant *ShadowGlobal;

  Value *LocalDynamicShadow = nullptr;
  GlobalValue *ThreadPtrGlobal = nullptr;
};

} // end anonymous namespace

char HWAddressSanitizer::ID = 0;

INITIALIZE_PASS_BEGIN(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
INITIALIZE_PASS_END(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)

FunctionPass *llvm::createHWAddressSanitizerPass(bool CompileKernel,
                                                 bool Recover) {
  assert(!CompileKernel || Recover);
  return new HWAddressSanitizer(CompileKernel, Recover);
}

/// Module-level initialization.
///
/// Inserts a call to __hwasan_init into the module's constructor list.
308bool HWAddressSanitizer::doInitialization(Module &M) {
Nicola Zaghend34e60c2018-05-14 12:53:11 +0000309 LLVM_DEBUG(dbgs() << "Init " << M.getName() << "\n");
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000310 auto &DL = M.getDataLayout();
311
Alex Shlyapnikov83e78412018-03-23 17:57:54 +0000312 TargetTriple = Triple(M.getTargetTriple());
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000313
Alex Shlyapnikov99cf54b2018-04-20 20:04:04 +0000314 Mapping.init(TargetTriple);
315
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000316 C = &(M.getContext());
Kostya Serebryanyaf955972018-10-23 00:50:40 +0000317 CurModuleUniqueId = getUniqueModuleId(&M);
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000318 IRBuilder<> IRB(*C);
319 IntptrTy = IRB.getIntPtrTy(DL);
Evgeniy Stepanova265a132018-08-15 00:39:35 +0000320 Int8PtrTy = IRB.getInt8PtrTy();
Evgeniy Stepanov99fa3e72018-01-11 22:53:30 +0000321 Int8Ty = IRB.getInt8Ty();
Peter Collingbourne73078ec2019-01-23 02:20:10 +0000322 Int32Ty = IRB.getInt32Ty();
Evgeniy Stepanovc667c1f2017-12-09 00:21:41 +0000323
Benjamin Kramerbfc1d972018-01-18 14:19:04 +0000324 HwasanCtorFunction = nullptr;
Andrey Konovalov1ba9d9c2018-04-13 18:05:21 +0000325 if (!CompileKernel) {
Evgeniy Stepanov5bd669d2018-01-17 23:24:38 +0000326 std::tie(HwasanCtorFunction, std::ignore) =
327 createSanitizerCtorAndInitFunctions(M, kHwasanModuleCtorName,
328 kHwasanInitName,
329 /*InitArgTypes=*/{},
330 /*InitArgs=*/{});
Peter Collingbourned3a3e4b2018-12-17 22:56:34 +0000331 Comdat *CtorComdat = M.getOrInsertComdat(kHwasanModuleCtorName);
332 HwasanCtorFunction->setComdat(CtorComdat);
333 appendToGlobalCtors(M, HwasanCtorFunction, 0, HwasanCtorFunction);
Kostya Serebryanyaf955972018-10-23 00:50:40 +0000334
    // Create a zero-length global in __hwasan_frames so that the linker will
    // always create start and stop symbols.
    //
    // N.B. If we ever start creating associated metadata in this pass this
    // global will need to be associated with the ctor.
    Type *Int8Arr0Ty = ArrayType::get(Int8Ty, 0);
    auto GV =
        new GlobalVariable(M, Int8Arr0Ty, /*isConstantGlobal*/ true,
                           GlobalVariable::PrivateLinkage,
                           Constant::getNullValue(Int8Arr0Ty), "__hwasan");
    GV->setSection(getFrameSection());
    GV->setComdat(CtorComdat);
    appendToCompilerUsed(M, GV);

    IRBuilder<> IRBCtor(HwasanCtorFunction->getEntryBlock().getTerminator());
    IRBCtor.CreateCall(
        declareSanitizerInitFunction(M, "__hwasan_init_frames",
                                     {Int8PtrTy, Int8PtrTy}),
        {createFrameSectionBound(M, Int8Ty, getFrameSectionBeg()),
         createFrameSectionBound(M, Int8Ty, getFrameSectionEnd())});
  }

  if (!TargetTriple.isAndroid())
    appendToCompilerUsed(
        M, ThreadPtrGlobal = new GlobalVariable(
               M, IntptrTy, false, GlobalVariable::ExternalLinkage, nullptr,
               "__hwasan_tls", nullptr, GlobalVariable::InitialExecTLSModel));

  return true;
}

void HWAddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    const std::string TypeStr = AccessIsWrite ? "store" : "load";
    const std::string EndingStr = Recover ? "_noabort" : "";

    HwasanMemoryAccessCallbackSized[AccessIsWrite] = M.getOrInsertFunction(
        ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr,
        FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false));

    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
          M.getOrInsertFunction(
              ClMemoryAccessCallbackPrefix + TypeStr +
                  itostr(1ULL << AccessSizeIndex) + EndingStr,
              FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false));
    }
  }

  HwasanTagMemoryFunc = M.getOrInsertFunction(
      "__hwasan_tag_memory", IRB.getVoidTy(), Int8PtrTy, Int8Ty, IntptrTy);
  HwasanGenerateTagFunc =
      M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty);

  ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow",
                                     ArrayType::get(IRB.getInt8Ty(), 0));

  const std::string MemIntrinCallbackPrefix =
      CompileKernel ? std::string("") : ClMemoryAccessCallbackPrefix;
  HWAsanMemmove = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memmove",
                                        IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                        IRB.getInt8PtrTy(), IntptrTy);
  HWAsanMemcpy = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memcpy",
                                       IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                       IRB.getInt8PtrTy(), IntptrTy);
  HWAsanMemset = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memset",
                                       IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                       IRB.getInt32Ty(), IntptrTy);

  HwasanThreadEnterFunc =
      M.getOrInsertFunction("__hwasan_thread_enter", IRB.getVoidTy());
}

Value *HWAddressSanitizer::getDynamicShadowIfunc(IRBuilder<> &IRB) {
  // An empty inline asm with input reg == output reg.
  // An opaque no-op cast, basically.
  InlineAsm *Asm = InlineAsm::get(
      FunctionType::get(Int8PtrTy, {ShadowGlobal->getType()}, false),
      StringRef(""), StringRef("=r,0"),
      /*hasSideEffects=*/false);
  return IRB.CreateCall(Asm, {ShadowGlobal}, ".hwasan.shadow");
}

Value *HWAddressSanitizer::getDynamicShadowNonTls(IRBuilder<> &IRB) {
  // Generate code only when dynamic addressing is needed.
  if (Mapping.Offset != kDynamicShadowSentinel)
    return nullptr;

  if (Mapping.InGlobal) {
    return getDynamicShadowIfunc(IRB);
  } else {
    Value *GlobalDynamicAddress =
        IRB.GetInsertBlock()->getParent()->getParent()->getOrInsertGlobal(
            kHwasanShadowMemoryDynamicAddress, Int8PtrTy);
    return IRB.CreateLoad(GlobalDynamicAddress);
  }
}

Value *HWAddressSanitizer::isInterestingMemoryAccess(Instruction *I,
                                                     bool *IsWrite,
                                                     uint64_t *TypeSize,
                                                     unsigned *Alignment,
                                                     Value **MaybeMask) {
  // Skip memory accesses inserted by another instrumentation.
  if (I->getMetadata("nosanitize")) return nullptr;

  // Do not instrument the load fetching the dynamic shadow address.
  if (LocalDynamicShadow == I)
    return nullptr;

  Value *PtrOperand = nullptr;
  const DataLayout &DL = I->getModule()->getDataLayout();
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return nullptr;
    *IsWrite = false;
    *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
    *Alignment = LI->getAlignment();
    PtrOperand = LI->getPointerOperand();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
    *Alignment = SI->getAlignment();
    PtrOperand = SI->getPointerOperand();
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
    *Alignment = 0;
    PtrOperand = RMW->getPointerOperand();
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
    *Alignment = 0;
    PtrOperand = XCHG->getPointerOperand();
  }

  if (PtrOperand) {
    // Do not instrument accesses from different address spaces; we cannot deal
    // with them.
    Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
    if (PtrTy->getPointerAddressSpace() != 0)
      return nullptr;

    // Ignore swifterror addresses.
    // swifterror memory addresses are mem2reg promoted by instruction
    // selection. As such they cannot have regular uses like an instrumentation
    // function and it makes no sense to track them as memory.
    if (PtrOperand->isSwiftError())
      return nullptr;
  }

  return PtrOperand;
}

static unsigned getPointerOperandIndex(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerOperandIndex();
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperandIndex();
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
    return RMW->getPointerOperandIndex();
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
    return XCHG->getPointerOperandIndex();
  report_fatal_error("Unexpected instruction");
  return -1;
}

static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}

void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
  if (TargetTriple.isAArch64())
    return;

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *UntaggedPtr =
      IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
  I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
}

Value *HWAddressSanitizer::shadowBase() {
  if (LocalDynamicShadow)
    return LocalDynamicShadow;
  return ConstantExpr::getIntToPtr(ConstantInt::get(IntptrTy, Mapping.Offset),
                                   Int8PtrTy);
}

Value *HWAddressSanitizer::memToShadow(Value *Mem, IRBuilder<> &IRB) {
  // Mem >> Scale
  Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
  if (Mapping.Offset == 0)
    return IRB.CreateIntToPtr(Shadow, Int8PtrTy);
  // (Mem >> Scale) + Offset
  return IRB.CreateGEP(Int8Ty, shadowBase(), Shadow);
}

void HWAddressSanitizer::instrumentMemAccessInline(Value *Ptr, bool IsWrite,
                                                   unsigned AccessSizeIndex,
                                                   Instruction *InsertBefore) {
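  // AccessInfo packs the check parameters into one small integer: bit 5 is set
  // in recover mode, bit 4 for writes, and the low bits hold the access-size
  // index. The same value is passed to the hwasan_check_memaccess intrinsic or
  // embedded in the brk/int3 immediate below so the runtime can decode which
  // kind of access faulted.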
  const int64_t AccessInfo = Recover * 0x20 + IsWrite * 0x10 + AccessSizeIndex;
  IRBuilder<> IRB(InsertBefore);

  if (!ClInlineAllChecks && TargetTriple.isAArch64() &&
      TargetTriple.isOSBinFormatELF() && !Recover) {
    Module *M = IRB.GetInsertBlock()->getParent()->getParent();
    Ptr = IRB.CreateBitCast(Ptr, Int8PtrTy);
    IRB.CreateCall(
        Intrinsic::getDeclaration(M, Intrinsic::hwasan_check_memaccess),
        {shadowBase(), Ptr, ConstantInt::get(Int32Ty, AccessInfo)});
    return;
  }

  Value *PtrLong = IRB.CreatePointerCast(Ptr, IntptrTy);
  Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, kPointerTagShift),
                                  IRB.getInt8Ty());
  Value *AddrLong = untagPointer(IRB, PtrLong);
  Value *Shadow = memToShadow(AddrLong, IRB);
  Value *MemTag = IRB.CreateLoad(Shadow);
  Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);

  int matchAllTag = ClMatchAllTag.getNumOccurrences() > 0 ?
      ClMatchAllTag : (CompileKernel ? 0xFF : -1);
  if (matchAllTag != -1) {
    Value *TagNotIgnored = IRB.CreateICmpNE(PtrTag,
        ConstantInt::get(PtrTag->getType(), matchAllTag));
    TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
  }

  Instruction *CheckTerm =
      SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, !Recover,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
  InlineAsm *Asm;
  switch (TargetTriple.getArch()) {
    case Triple::x86_64:
      // The signal handler will find the data address in rdi.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "int3\nnopl " + itostr(0x40 + AccessInfo) + "(%rax)",
          "{rdi}",
          /*hasSideEffects=*/true);
      break;
    case Triple::aarch64:
    case Triple::aarch64_be:
      // The signal handler will find the data address in x0.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "brk #" + itostr(0x900 + AccessInfo),
          "{x0}",
          /*hasSideEffects=*/true);
      break;
    default:
      report_fatal_error("unsupported architecture");
  }
  IRB.CreateCall(Asm, PtrLong);
}

void HWAddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  IRBuilder<> IRB(MI);
  if (isa<MemTransferInst>(MI)) {
    IRB.CreateCall(
        isa<MemMoveInst>(MI) ? HWAsanMemmove : HWAsanMemcpy,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreatePointerCast(MI->getOperand(1), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  } else if (isa<MemSetInst>(MI)) {
    IRB.CreateCall(
        HWAsanMemset,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  }
  MI->eraseFromParent();
}

bool HWAddressSanitizer::instrumentMemAccess(Instruction *I) {
  LLVM_DEBUG(dbgs() << "Instrumenting: " << *I << "\n");
  bool IsWrite = false;
  unsigned Alignment = 0;
  uint64_t TypeSize = 0;
  Value *MaybeMask = nullptr;

  if (ClInstrumentMemIntrinsics && isa<MemIntrinsic>(I)) {
    instrumentMemIntrinsic(cast<MemIntrinsic>(I));
    return true;
  }

  Value *Addr =
      isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);

  if (!Addr)
    return false;

  if (MaybeMask)
    return false; // FIXME

  IRBuilder<> IRB(I);
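  // Power-of-two accesses of at most 16 bytes with sufficient alignment take
  // the fast path below (inline check or fixed-size callback); everything else
  // falls back to the generic sized callback.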
  if (isPowerOf2_64(TypeSize) &&
      (TypeSize / 8 <= (1UL << (kNumberOfAccessSizes - 1))) &&
      (Alignment >= (1UL << Mapping.Scale) || Alignment == 0 ||
       Alignment >= TypeSize / 8)) {
    size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
    if (ClInstrumentWithCalls) {
      IRB.CreateCall(HwasanMemoryAccessCallback[IsWrite][AccessSizeIndex],
                     IRB.CreatePointerCast(Addr, IntptrTy));
    } else {
      instrumentMemAccessInline(Addr, IsWrite, AccessSizeIndex, I);
    }
  } else {
    IRB.CreateCall(HwasanMemoryAccessCallbackSized[IsWrite],
                   {IRB.CreatePointerCast(Addr, IntptrTy),
                    ConstantInt::get(IntptrTy, TypeSize / 8)});
  }
  untagPointerOperand(I, Addr);

  return true;
}

static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
  uint64_t ArraySize = 1;
  if (AI.isArrayAllocation()) {
    const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
    assert(CI && "non-constant array size");
    ArraySize = CI->getZExtValue();
  }
  Type *Ty = AI.getAllocatedType();
  uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
  return SizeInBytes * ArraySize;
}

bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
                                   Value *Tag) {
  size_t Size = (getAllocaSizeInBytes(*AI) + Mapping.getAllocaAlignment() - 1) &
                ~(Mapping.getAllocaAlignment() - 1);

  Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
  if (ClInstrumentWithCalls) {
    IRB.CreateCall(HwasanTagMemoryFunc,
                   {IRB.CreatePointerCast(AI, Int8PtrTy), JustTag,
                    ConstantInt::get(IntptrTy, Size)});
  } else {
    size_t ShadowSize = Size >> Mapping.Scale;
    Value *ShadowPtr = memToShadow(IRB.CreatePointerCast(AI, IntptrTy), IRB);
    // If this memset is not inlined, it will be intercepted in the hwasan
    // runtime library. That's OK, because the interceptor skips the checks if
    // the address is in the shadow region.
    // FIXME: the interceptor is not as fast as real memset. Consider lowering
    // llvm.memset right here into either a sequence of stores, or a call to
    // hwasan_tag_memory.
    IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, /*Align=*/1);
  }
  return true;
}

static unsigned RetagMask(unsigned AllocaNo) {
  // A list of 8-bit numbers that have at most one run of non-zero bits.
  // x = x ^ (mask << 56) can be encoded as a single armv8 instruction for these
  // masks.
  // The list does not include the value 255, which is used for UAR.
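  // Indexing below is modulo the table size, so tags repeat once a frame has
  // more interesting allocas than the table has entries.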
  static unsigned FastMasks[] = {
      0,   1,   2,   3,   4,   6,   7,   8,   12,  14,  15,  16,  24,
      28,  30,  31,  32,  48,  56,  60,  62,  63,  64,  96,  112, 120,
      124, 126, 127, 128, 192, 224, 240, 248, 252, 254};
  return FastMasks[AllocaNo % (sizeof(FastMasks) / sizeof(FastMasks[0]))];
}

Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
  return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
}

Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
  // first).
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  auto GetStackPointerFn =
      Intrinsic::getDeclaration(M, Intrinsic::frameaddress);
  Value *StackPointer = IRB.CreateCall(
      GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});

  // Extract some entropy from the stack pointer for the tags.
  // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
  // between functions).
  Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
  Value *StackTag =
      IRB.CreateXor(StackPointerLong, IRB.CreateLShr(StackPointerLong, 20),
                    "hwasan.stack.base.tag");
  return StackTag;
}

Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
                                        AllocaInst *AI, unsigned AllocaNo) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag,
                       ConstantInt::get(IntptrTy, RetagMask(AllocaNo)));
}

Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
  if (ClUARRetagToZero)
    return ConstantInt::get(IntptrTy, 0);
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, 0xFFU));
}

// Add a tag to an address.
Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
                                      Value *PtrLong, Value *Tag) {
  Value *TaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    Value *ShiftedTag = IRB.CreateOr(
        IRB.CreateShl(Tag, kPointerTagShift),
        ConstantInt::get(IntptrTy, (1ULL << kPointerTagShift) - 1));
    TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
  } else {
    // Userspace can simply do OR (tag << 56);
    Value *ShiftedTag = IRB.CreateShl(Tag, kPointerTagShift);
    TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
  }
  return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
}

// Remove tag from an address.
Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
  Value *UntaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    UntaggedPtrLong = IRB.CreateOr(PtrLong,
        ConstantInt::get(PtrLong->getType(), 0xFFULL << kPointerTagShift));
  } else {
    // Userspace addresses have 0x00.
    UntaggedPtrLong = IRB.CreateAnd(PtrLong,
        ConstantInt::get(PtrLong->getType(), ~(0xFFULL << kPointerTagShift)));
  }
  return UntaggedPtrLong;
}

Value *HWAddressSanitizer::getHwasanThreadSlotPtr(IRBuilder<> &IRB, Type *Ty) {
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  if (TargetTriple.isAArch64() && TargetTriple.isAndroid()) {
    // Android provides a fixed TLS slot for sanitizers. See TLS_SLOT_SANITIZER
    // in Bionic's libc/private/bionic_tls.h.
    Function *ThreadPointerFunc =
        Intrinsic::getDeclaration(M, Intrinsic::thread_pointer);
    Value *SlotPtr = IRB.CreatePointerCast(
        IRB.CreateConstGEP1_32(IRB.CreateCall(ThreadPointerFunc), 0x30),
        Ty->getPointerTo(0));
    return SlotPtr;
  }
  if (ThreadPtrGlobal)
    return ThreadPtrGlobal;

  return nullptr;
}

// Creates a string with a description of the stack frame (set of Allocas).
// The string is intended to be human readable.
// The current form is: Size1 Name1; Size2 Name2; ...
std::string
HWAddressSanitizer::createFrameString(ArrayRef<AllocaInst *> Allocas) {
  std::ostringstream Descr;
  for (auto AI : Allocas)
    Descr << getAllocaSizeInBytes(*AI) << " " << AI->getName().str() << "; ";
  return Descr.str();
}

// Creates a global in the frame section which consists of two pointers:
// the function PC and the frame string constant.
void HWAddressSanitizer::createFrameGlobal(Function &F,
                                           const std::string &FrameString) {
  Module &M = *F.getParent();
  auto DescrGV = createPrivateGlobalForString(M, FrameString, true);
  auto PtrPairTy = StructType::get(F.getType(), DescrGV->getType());
  auto GV = new GlobalVariable(
      M, PtrPairTy, /*isConstantGlobal*/ true, GlobalVariable::PrivateLinkage,
      ConstantStruct::get(PtrPairTy, (Constant *)&F, (Constant *)DescrGV),
      "__hwasan");
  GV->setSection(getFrameSection());
  appendToCompilerUsed(M, GV);
  // Put GV into F's Comdat so that if F is deleted GV can be deleted too.
  if (auto Comdat =
          GetOrCreateFunctionComdat(F, TargetTriple, CurModuleUniqueId))
    GV->setComdat(Comdat);
}

Value *HWAddressSanitizer::emitPrologue(IRBuilder<> &IRB,
                                        bool WithFrameRecord) {
  if (!Mapping.InTls)
    return getDynamicShadowNonTls(IRB);

  if (ClAllowIfunc && !WithFrameRecord && TargetTriple.isAndroid())
    return getDynamicShadowIfunc(IRB);

  Value *SlotPtr = getHwasanThreadSlotPtr(IRB, IntptrTy);
  assert(SlotPtr);

  Instruction *ThreadLong = IRB.CreateLoad(SlotPtr);

  Function *F = IRB.GetInsertBlock()->getParent();
  if (F->getFnAttribute("hwasan-abi").getValueAsString() == "interceptor") {
    Value *ThreadLongEqZero =
        IRB.CreateICmpEQ(ThreadLong, ConstantInt::get(IntptrTy, 0));
    auto *Br = cast<BranchInst>(SplitBlockAndInsertIfThen(
        ThreadLongEqZero, cast<Instruction>(ThreadLongEqZero)->getNextNode(),
        false, MDBuilder(*C).createBranchWeights(1, 100000)));

    IRB.SetInsertPoint(Br);
    // FIXME: This should call a new runtime function with a custom calling
    // convention to avoid needing to spill all arguments here.
    IRB.CreateCall(HwasanThreadEnterFunc);
    LoadInst *ReloadThreadLong = IRB.CreateLoad(SlotPtr);

    IRB.SetInsertPoint(&*Br->getSuccessor(0)->begin());
    PHINode *ThreadLongPhi = IRB.CreatePHI(IntptrTy, 2);
    ThreadLongPhi->addIncoming(ThreadLong, ThreadLong->getParent());
    ThreadLongPhi->addIncoming(ReloadThreadLong, ReloadThreadLong->getParent());
    ThreadLong = ThreadLongPhi;
  }

  // Extract the address field from ThreadLong. Unnecessary on AArch64 with TBI.
  Value *ThreadLongMaybeUntagged =
      TargetTriple.isAArch64() ? ThreadLong : untagPointer(IRB, ThreadLong);

  if (WithFrameRecord) {
    // Prepare ring buffer data.
    auto PC = IRB.CreatePtrToInt(F, IntptrTy);
    auto GetStackPointerFn =
        Intrinsic::getDeclaration(F->getParent(), Intrinsic::frameaddress);
    Value *SP = IRB.CreatePtrToInt(
        IRB.CreateCall(GetStackPointerFn,
                       {Constant::getNullValue(IRB.getInt32Ty())}),
        IntptrTy);
    // Mix SP and PC. TODO: also add the tag to the mix.
    // Assumptions:
    //   PC is 0x0000PPPPPPPPPPPP  (48 bits are meaningful, others are zero)
    //   SP is 0xsssssssssssSSSS0  (4 lower bits are zero)
    // We only really need ~20 lower non-zero bits (SSSS), so we mix like this:
    //   0xSSSSPPPPPPPPPPPP
    SP = IRB.CreateShl(SP, 44);

    // Store data to ring buffer.
    Value *RecordPtr =
        IRB.CreateIntToPtr(ThreadLongMaybeUntagged, IntptrTy->getPointerTo(0));
    IRB.CreateStore(IRB.CreateOr(PC, SP), RecordPtr);

    // Update the ring buffer. Top byte of ThreadLong defines the size of the
    // buffer in pages, it must be a power of two, and the start of the buffer
    // must be aligned by twice that much. Therefore wrap around of the ring
    // buffer is simply Addr &= ~((ThreadLong >> 56) << 12).
    // The use of AShr instead of LShr is due to
    //   https://bugs.llvm.org/show_bug.cgi?id=39030
    // Runtime library makes sure not to use the highest bit.
    Value *WrapMask = IRB.CreateXor(
        IRB.CreateShl(IRB.CreateAShr(ThreadLong, 56), 12, "", true, true),
        ConstantInt::get(IntptrTy, (uint64_t)-1));
    Value *ThreadLongNew = IRB.CreateAnd(
        IRB.CreateAdd(ThreadLong, ConstantInt::get(IntptrTy, 8)), WrapMask);
    IRB.CreateStore(ThreadLongNew, SlotPtr);
  }

  // Get the shadow base address by aligning ThreadLongMaybeUntagged up.
  // Note: this is not correct if the pointer is already aligned.
  // Runtime library will make sure this never happens.
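  // (x | (2^kShadowBaseAlignment - 1)) + 1 rounds x up to the next multiple of
  // 2^kShadowBaseAlignment (4 GiB), which is where the runtime is expected to
  // place the shadow.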
  Value *ShadowBase = IRB.CreateAdd(
      IRB.CreateOr(
          ThreadLongMaybeUntagged,
          ConstantInt::get(IntptrTy, (1ULL << kShadowBaseAlignment) - 1)),
      ConstantInt::get(IntptrTy, 1), "hwasan.shadow");
  ShadowBase = IRB.CreateIntToPtr(ShadowBase, Int8PtrTy);
  return ShadowBase;
}

bool HWAddressSanitizer::instrumentStack(
    SmallVectorImpl<AllocaInst *> &Allocas,
    SmallVectorImpl<Instruction *> &RetVec, Value *StackTag) {
  // Ideally, we want to calculate a tagged stack base pointer, and rewrite all
  // alloca addresses using that. Unfortunately, offsets are not known yet
  // (unless we use ASan-style mega-alloca). Instead we keep the base tag in a
  // temp, shift-OR it into each alloca address and xor with the retag mask.
  // This generates one extra instruction per alloca use.
  for (unsigned N = 0; N < Allocas.size(); ++N) {
    auto *AI = Allocas[N];
    IRBuilder<> IRB(AI->getNextNode());

    // Replace uses of the alloca with tagged address.
    Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
    Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
    Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
    std::string Name =
        AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
    Replacement->setName(Name + ".hwasan");

    for (auto UI = AI->use_begin(), UE = AI->use_end(); UI != UE;) {
      Use &U = *UI++;
      if (U.getUser() != AILong)
        U.set(Replacement);
    }

    tagAlloca(IRB, AI, Tag);

    for (auto RI : RetVec) {
      IRB.SetInsertPoint(RI);

      // Re-tag alloca memory with the special UAR tag.
      Value *Tag = getUARTag(IRB, StackTag);
      tagAlloca(IRB, AI, Tag);
    }
  }

  return true;
}

bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  return (AI.getAllocatedType()->isSized() &&
          // FIXME: instrument dynamic allocas, too
          AI.isStaticAlloca() &&
          // alloca() may be called with 0 size, ignore it.
          getAllocaSizeInBytes(AI) > 0 &&
          // We are only interested in allocas not promotable to registers.
          // Promotable allocas are common under -O0.
          !isAllocaPromotable(&AI) &&
          // inalloca allocas are not treated as static, and we don't want
          // dynamic alloca instrumentation for them as well.
          !AI.isUsedWithInAlloca() &&
          // swifterror allocas are register promoted by ISel
          !AI.isSwiftError());
}

bool HWAddressSanitizer::runOnFunction(Function &F) {
  if (&F == HwasanCtorFunction)
    return false;

  if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
    return false;

  LLVM_DEBUG(dbgs() << "Function: " << F.getName() << "\n");

  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<AllocaInst*, 8> AllocasToInstrument;
  SmallVector<Instruction*, 8> RetVec;
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (ClInstrumentStack)
        if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
          // Realign all allocas. We don't want small uninteresting allocas to
          // hide in instrumented alloca's padding.
          if (AI->getAlignment() < Mapping.getAllocaAlignment())
            AI->setAlignment(Mapping.getAllocaAlignment());
          // Instrument some of them.
          if (isInterestingAlloca(*AI))
            AllocasToInstrument.push_back(AI);
          continue;
        }

      if (isa<ReturnInst>(Inst) || isa<ResumeInst>(Inst) ||
          isa<CleanupReturnInst>(Inst))
        RetVec.push_back(&Inst);

      Value *MaybeMask = nullptr;
      bool IsWrite;
      unsigned Alignment;
      uint64_t TypeSize;
      Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
                                              &Alignment, &MaybeMask);
      if (Addr || isa<MemIntrinsic>(Inst))
        ToInstrument.push_back(&Inst);
    }
  }

  if (AllocasToInstrument.empty() && ToInstrument.empty())
    return false;

  if (ClCreateFrameDescriptions && !AllocasToInstrument.empty())
    createFrameGlobal(F, createFrameString(AllocasToInstrument));

  initializeCallbacks(*F.getParent());

  assert(!LocalDynamicShadow);

  Instruction *InsertPt = &*F.getEntryBlock().begin();
  IRBuilder<> EntryIRB(InsertPt);
  LocalDynamicShadow = emitPrologue(EntryIRB,
                                    /*WithFrameRecord*/ ClRecordStackHistory &&
                                        !AllocasToInstrument.empty());

  bool Changed = false;
  if (!AllocasToInstrument.empty()) {
    Value *StackTag =
        ClGenerateTagsWithCalls ? nullptr : getStackBaseTag(EntryIRB);
    Changed |= instrumentStack(AllocasToInstrument, RetVec, StackTag);
  }

  // If we split the entry block, move any allocas that were originally in the
  // entry block back into the entry block so that they aren't treated as
  // dynamic allocas.
  if (EntryIRB.GetInsertBlock() != &F.getEntryBlock()) {
    InsertPt = &*F.getEntryBlock().begin();
    for (auto II = EntryIRB.GetInsertBlock()->begin(),
              IE = EntryIRB.GetInsertBlock()->end();
         II != IE;) {
      Instruction *I = &*II++;
      if (auto *AI = dyn_cast<AllocaInst>(I))
        if (isa<ConstantInt>(AI->getArraySize()))
          I->moveBefore(InsertPt);
    }
  }

  for (auto Inst : ToInstrument)
    Changed |= instrumentMemAccess(Inst);

  LocalDynamicShadow = nullptr;

  return Changed;
}

void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple) {
  Scale = kDefaultShadowScale;
  if (ClMappingOffset.getNumOccurrences() > 0) {
    InGlobal = false;
    InTls = false;
    Offset = ClMappingOffset;
  } else if (ClEnableKhwasan || ClInstrumentWithCalls) {
    InGlobal = false;
    InTls = false;
    Offset = 0;
  } else if (ClWithIfunc) {
    InGlobal = true;
    InTls = false;
    Offset = kDynamicShadowSentinel;
  } else if (ClWithTls) {
    InGlobal = false;
    InTls = true;
    Offset = kDynamicShadowSentinel;
  } else {
    InGlobal = false;
    InTls = false;
    Offset = kDynamicShadowSentinel;
  }
}