//===- HWAddressSanitizer.cpp - Tagged-pointer memory error detector ------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file is a part of HWAddressSanitizer, an address sanity checker
/// based on tagged addressing.
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation/HWAddressSanitizer.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
#include <sstream>

using namespace llvm;

#define DEBUG_TYPE "hwasan"

static const char *const kHwasanModuleCtorName = "hwasan.module_ctor";
static const char *const kHwasanInitName = "__hwasan_init";

static const char *const kHwasanShadowMemoryDynamicAddress =
    "__hwasan_shadow_memory_dynamic_address";

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

static const size_t kDefaultShadowScale = 4;
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();
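// The tag lives in the top byte of each pointer (bits 56..63). On AArch64 the
// top-byte-ignore (TBI) feature lets such tagged pointers be dereferenced
// without stripping the tag first.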
static const unsigned kPointerTagShift = 56;

static const unsigned kShadowBaseAlignment = 32;

static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "hwasan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__hwasan_"));

static cl::opt<bool>
    ClInstrumentWithCalls("hwasan-instrument-with-calls",
                          cl::desc("instrument reads and writes with callbacks"),
                          cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentWrites(
    "hwasan-instrument-writes", cl::desc("instrument write instructions"),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "hwasan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClRecover(
    "hwasan-recover",
    cl::desc("Enable recovery mode (continue-after-error)."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
                                       cl::desc("instrument stack (allocas)"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClUARRetagToZero(
    "hwasan-uar-retag-to-zero",
    cl::desc("Clear alloca tags before returning from the function to allow "
             "non-instrumented and instrumented function calls mix. When set "
             "to false, allocas are retagged before returning from the "
             "function to detect use after return."),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClGenerateTagsWithCalls(
    "hwasan-generate-tags-with-calls",
    cl::desc("generate new tags with runtime library calls"), cl::Hidden,
    cl::init(false));

static cl::opt<int> ClMatchAllTag(
    "hwasan-match-all-tag",
    cl::desc("don't report bad accesses via pointers with this tag"),
    cl::Hidden, cl::init(-1));

static cl::opt<bool> ClEnableKhwasan(
    "hwasan-kernel",
    cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
    cl::Hidden, cl::init(false));

// These flags allow changing the shadow mapping and control how shadow memory
// is accessed. The shadow mapping looks like:
//    Shadow = (Mem >> scale) + offset
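// With the default scale of 4, each shadow byte describes a 16-byte granule of
// application memory.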

static cl::opt<uint64_t>
    ClMappingOffset("hwasan-mapping-offset",
                    cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"),
                    cl::Hidden, cl::init(0));

static cl::opt<bool>
    ClWithIfunc("hwasan-with-ifunc",
                cl::desc("Access dynamic shadow through an ifunc global on "
                         "platforms that support this"),
                cl::Hidden, cl::init(false));

static cl::opt<bool> ClWithTls(
    "hwasan-with-tls",
    cl::desc("Access dynamic shadow through a thread-local pointer on "
             "platforms that support this"),
    cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClRecordStackHistory("hwasan-record-stack-history",
                         cl::desc("Record stack frames with tagged allocations "
                                  "in a thread-local ring buffer"),
                         cl::Hidden, cl::init(true));
static cl::opt<bool>
    ClInstrumentMemIntrinsics("hwasan-instrument-mem-intrinsics",
                              cl::desc("instrument memory intrinsics"),
                              cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClInstrumentLandingPads("hwasan-instrument-landing-pads",
                            cl::desc("instrument landing pads"), cl::Hidden,
                            cl::init(true));

static cl::opt<bool> ClInlineAllChecks("hwasan-inline-all-checks",
                                       cl::desc("inline all checks"),
                                       cl::Hidden, cl::init(false));

namespace {

/// An instrumentation pass implementing detection of addressability bugs
/// using tagged pointers.
class HWAddressSanitizer {
public:
  explicit HWAddressSanitizer(Module &M, bool CompileKernel = false,
                              bool Recover = false) {
    this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
    this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0 ?
        ClEnableKhwasan : CompileKernel;

    initializeModule(M);
  }

  bool sanitizeFunction(Function &F);
  void initializeModule(Module &M);

  void initializeCallbacks(Module &M);

  Value *getDynamicShadowIfunc(IRBuilder<> &IRB);
  Value *getDynamicShadowNonTls(IRBuilder<> &IRB);

  void untagPointerOperand(Instruction *I, Value *Addr);
  Value *shadowBase();
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  void instrumentMemAccessInline(Value *Ptr, bool IsWrite,
                                 unsigned AccessSizeIndex,
                                 Instruction *InsertBefore);
  void instrumentMemIntrinsic(MemIntrinsic *MI);
  bool instrumentMemAccess(Instruction *I);
  Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
                                   uint64_t *TypeSize, unsigned *Alignment,
                                   Value **MaybeMask);

  bool isInterestingAlloca(const AllocaInst &AI);
  bool tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag);
  Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
  Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
  bool instrumentStack(
      SmallVectorImpl<AllocaInst *> &Allocas,
      DenseMap<AllocaInst *, std::vector<DbgDeclareInst *>> &AllocaDeclareMap,
      SmallVectorImpl<Instruction *> &RetVec, Value *StackTag);
  Value *readRegister(IRBuilder<> &IRB, StringRef Name);
  bool instrumentLandingPads(SmallVectorImpl<Instruction *> &RetVec);
  Value *getNextTagWithCall(IRBuilder<> &IRB);
  Value *getStackBaseTag(IRBuilder<> &IRB);
  Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
                      unsigned AllocaNo);
  Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);

  Value *getHwasanThreadSlotPtr(IRBuilder<> &IRB, Type *Ty);
  void emitPrologue(IRBuilder<> &IRB, bool WithFrameRecord);

private:
  LLVMContext *C;
  std::string CurModuleUniqueId;
  Triple TargetTriple;
  FunctionCallee HWAsanMemmove, HWAsanMemcpy, HWAsanMemset;
  FunctionCallee HWAsanHandleVfork;

  /// This struct defines the shadow mapping using the rule:
  ///   shadow = (mem >> Scale) + Offset.
  /// If InGlobal is true, then
  ///   extern char __hwasan_shadow[];
  ///   shadow = (mem >> Scale) + &__hwasan_shadow
  /// If InTls is true, then
  ///   extern char *__hwasan_tls;
  ///   shadow = (mem >> Scale) + align_up(__hwasan_tls, kShadowBaseAlignment)
  struct ShadowMapping {
    int Scale;
    uint64_t Offset;
    bool InGlobal;
    bool InTls;

    void init(Triple &TargetTriple);
    unsigned getAllocaAlignment() const { return 1U << Scale; }
  };
  ShadowMapping Mapping;

  Type *IntptrTy;
  Type *Int8PtrTy;
  Type *Int8Ty;
  Type *Int32Ty;

  bool CompileKernel;
  bool Recover;

  Function *HwasanCtorFunction;

  FunctionCallee HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
  FunctionCallee HwasanMemoryAccessCallbackSized[2];

  FunctionCallee HwasanTagMemoryFunc;
  FunctionCallee HwasanGenerateTagFunc;
  FunctionCallee HwasanThreadEnterFunc;

  Constant *ShadowGlobal;

  Value *LocalDynamicShadow = nullptr;
  Value *StackBaseTag = nullptr;
  GlobalValue *ThreadPtrGlobal = nullptr;
};

class HWAddressSanitizerLegacyPass : public FunctionPass {
public:
  // Pass identification, replacement for typeid.
  static char ID;

  explicit HWAddressSanitizerLegacyPass(bool CompileKernel = false,
                                        bool Recover = false)
      : FunctionPass(ID), CompileKernel(CompileKernel), Recover(Recover) {}

  StringRef getPassName() const override { return "HWAddressSanitizer"; }

  bool runOnFunction(Function &F) override {
    HWAddressSanitizer HWASan(*F.getParent(), CompileKernel, Recover);
    return HWASan.sanitizeFunction(F);
  }

private:
  bool CompileKernel;
  bool Recover;
};

} // end anonymous namespace

char HWAddressSanitizerLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(
    HWAddressSanitizerLegacyPass, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
INITIALIZE_PASS_END(
    HWAddressSanitizerLegacyPass, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)

FunctionPass *llvm::createHWAddressSanitizerLegacyPassPass(bool CompileKernel,
                                                           bool Recover) {
  assert(!CompileKernel || Recover);
  return new HWAddressSanitizerLegacyPass(CompileKernel, Recover);
}

HWAddressSanitizerPass::HWAddressSanitizerPass(bool CompileKernel, bool Recover)
    : CompileKernel(CompileKernel), Recover(Recover) {}

PreservedAnalyses HWAddressSanitizerPass::run(Function &F,
                                              FunctionAnalysisManager &FAM) {
  HWAddressSanitizer HWASan(*F.getParent(), CompileKernel, Recover);
  if (HWASan.sanitizeFunction(F))
    return PreservedAnalyses::none();
  return PreservedAnalyses::all();
}

/// Module-level initialization.
///
/// Inserts a call to __hwasan_init into the module's constructor list.
void HWAddressSanitizer::initializeModule(Module &M) {
  LLVM_DEBUG(dbgs() << "Init " << M.getName() << "\n");
  auto &DL = M.getDataLayout();

  TargetTriple = Triple(M.getTargetTriple());

  Mapping.init(TargetTriple);

  C = &(M.getContext());
  CurModuleUniqueId = getUniqueModuleId(&M);
  IRBuilder<> IRB(*C);
  IntptrTy = IRB.getIntPtrTy(DL);
  Int8PtrTy = IRB.getInt8PtrTy();
  Int8Ty = IRB.getInt8Ty();
  Int32Ty = IRB.getInt32Ty();

  HwasanCtorFunction = nullptr;
  if (!CompileKernel) {
    std::tie(HwasanCtorFunction, std::ignore) =
        getOrCreateSanitizerCtorAndInitFunctions(
            M, kHwasanModuleCtorName, kHwasanInitName,
            /*InitArgTypes=*/{},
            /*InitArgs=*/{},
            // This callback is invoked when the functions are created the first
            // time. Hook them into the global ctors list in that case:
            [&](Function *Ctor, FunctionCallee) {
              Comdat *CtorComdat = M.getOrInsertComdat(kHwasanModuleCtorName);
              Ctor->setComdat(CtorComdat);
              appendToGlobalCtors(M, Ctor, 0, Ctor);
            });
  }

  if (!TargetTriple.isAndroid()) {
    Constant *C = M.getOrInsertGlobal("__hwasan_tls", IntptrTy, [&] {
      auto *GV = new GlobalVariable(M, IntptrTy, /*isConstantGlobal=*/false,
                                    GlobalValue::ExternalLinkage, nullptr,
                                    "__hwasan_tls", nullptr,
                                    GlobalVariable::InitialExecTLSModel);
      appendToCompilerUsed(M, GV);
      return GV;
    });
    ThreadPtrGlobal = cast<GlobalVariable>(C);
  }
}

void HWAddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    const std::string TypeStr = AccessIsWrite ? "store" : "load";
    const std::string EndingStr = Recover ? "_noabort" : "";

    HwasanMemoryAccessCallbackSized[AccessIsWrite] = M.getOrInsertFunction(
        ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr,
        FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false));

    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
          M.getOrInsertFunction(
              ClMemoryAccessCallbackPrefix + TypeStr +
                  itostr(1ULL << AccessSizeIndex) + EndingStr,
              FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false));
    }
  }

  HwasanTagMemoryFunc = M.getOrInsertFunction(
      "__hwasan_tag_memory", IRB.getVoidTy(), Int8PtrTy, Int8Ty, IntptrTy);
  HwasanGenerateTagFunc =
      M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty);

  ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow",
                                     ArrayType::get(IRB.getInt8Ty(), 0));

  const std::string MemIntrinCallbackPrefix =
      CompileKernel ? std::string("") : ClMemoryAccessCallbackPrefix;
  HWAsanMemmove = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memmove",
                                        IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                        IRB.getInt8PtrTy(), IntptrTy);
  HWAsanMemcpy = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memcpy",
                                       IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                       IRB.getInt8PtrTy(), IntptrTy);
  HWAsanMemset = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memset",
                                       IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                       IRB.getInt32Ty(), IntptrTy);

  HWAsanHandleVfork =
      M.getOrInsertFunction("__hwasan_handle_vfork", IRB.getVoidTy(), IntptrTy);

  HwasanThreadEnterFunc =
      M.getOrInsertFunction("__hwasan_thread_enter", IRB.getVoidTy());
}

Value *HWAddressSanitizer::getDynamicShadowIfunc(IRBuilder<> &IRB) {
  // An empty inline asm with input reg == output reg.
  // An opaque no-op cast, basically.
  InlineAsm *Asm = InlineAsm::get(
      FunctionType::get(Int8PtrTy, {ShadowGlobal->getType()}, false),
      StringRef(""), StringRef("=r,0"),
      /*hasSideEffects=*/false);
  return IRB.CreateCall(Asm, {ShadowGlobal}, ".hwasan.shadow");
}

Value *HWAddressSanitizer::getDynamicShadowNonTls(IRBuilder<> &IRB) {
  // Generate code only when dynamic addressing is needed.
  if (Mapping.Offset != kDynamicShadowSentinel)
    return nullptr;

  if (Mapping.InGlobal) {
    return getDynamicShadowIfunc(IRB);
  } else {
    Value *GlobalDynamicAddress =
        IRB.GetInsertBlock()->getParent()->getParent()->getOrInsertGlobal(
            kHwasanShadowMemoryDynamicAddress, Int8PtrTy);
    return IRB.CreateLoad(Int8PtrTy, GlobalDynamicAddress);
  }
}

Value *HWAddressSanitizer::isInterestingMemoryAccess(Instruction *I,
                                                     bool *IsWrite,
                                                     uint64_t *TypeSize,
                                                     unsigned *Alignment,
                                                     Value **MaybeMask) {
  // Skip memory accesses inserted by another instrumentation.
  if (I->getMetadata("nosanitize")) return nullptr;

  // Do not instrument the load fetching the dynamic shadow address.
  if (LocalDynamicShadow == I)
    return nullptr;

  Value *PtrOperand = nullptr;
  const DataLayout &DL = I->getModule()->getDataLayout();
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return nullptr;
    *IsWrite = false;
    *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
    *Alignment = LI->getAlignment();
    PtrOperand = LI->getPointerOperand();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
    *Alignment = SI->getAlignment();
    PtrOperand = SI->getPointerOperand();
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
    *Alignment = 0;
    PtrOperand = RMW->getPointerOperand();
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
    *Alignment = 0;
    PtrOperand = XCHG->getPointerOperand();
  }

  if (PtrOperand) {
    // Do not instrument accesses from different address spaces; we cannot deal
    // with them.
    Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
    if (PtrTy->getPointerAddressSpace() != 0)
      return nullptr;

    // Ignore swifterror addresses.
    // swifterror memory addresses are mem2reg promoted by instruction
    // selection. As such they cannot have regular uses like an instrumentation
    // function and it makes no sense to track them as memory.
    if (PtrOperand->isSwiftError())
      return nullptr;
  }

  return PtrOperand;
}

static unsigned getPointerOperandIndex(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerOperandIndex();
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperandIndex();
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
    return RMW->getPointerOperandIndex();
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
    return XCHG->getPointerOperandIndex();
  report_fatal_error("Unexpected instruction");
  return -1;
}

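// Map an access size in bits to the callback/check index:
// 8 -> 0, 16 -> 1, 32 -> 2, 64 -> 3, 128 -> 4.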
static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}

void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
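  // On AArch64, top-byte-ignore (TBI) lets the tagged pointer be used by the
  // access itself, so there is nothing to rewrite. Other targets must strip
  // the tag before the memory operation.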
  if (TargetTriple.isAArch64())
    return;

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *UntaggedPtr =
      IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
  I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
}

Value *HWAddressSanitizer::shadowBase() {
  if (LocalDynamicShadow)
    return LocalDynamicShadow;
  return ConstantExpr::getIntToPtr(ConstantInt::get(IntptrTy, Mapping.Offset),
                                   Int8PtrTy);
}

Value *HWAddressSanitizer::memToShadow(Value *Mem, IRBuilder<> &IRB) {
  // Mem >> Scale
  Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
  if (Mapping.Offset == 0)
    return IRB.CreateIntToPtr(Shadow, Int8PtrTy);
  // (Mem >> Scale) + Offset
  return IRB.CreateGEP(Int8Ty, shadowBase(), Shadow);
}

void HWAddressSanitizer::instrumentMemAccessInline(Value *Ptr, bool IsWrite,
                                                   unsigned AccessSizeIndex,
                                                   Instruction *InsertBefore) {
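  // Pack the check parameters into one immediate: bit 5 = recover,
  // bit 4 = is-write, bits 0..3 = log2(access size in bytes). The value is
  // either passed to the check intrinsic or encoded into the brk/int3
  // operand below for the runtime to decode.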
  const int64_t AccessInfo = Recover * 0x20 + IsWrite * 0x10 + AccessSizeIndex;
  IRBuilder<> IRB(InsertBefore);

  if (!ClInlineAllChecks && TargetTriple.isAArch64() &&
      TargetTriple.isOSBinFormatELF() && !Recover) {
    Module *M = IRB.GetInsertBlock()->getParent()->getParent();
    Ptr = IRB.CreateBitCast(Ptr, Int8PtrTy);
    IRB.CreateCall(
        Intrinsic::getDeclaration(M, Intrinsic::hwasan_check_memaccess),
        {shadowBase(), Ptr, ConstantInt::get(Int32Ty, AccessInfo)});
    return;
  }

  Value *PtrLong = IRB.CreatePointerCast(Ptr, IntptrTy);
  Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, kPointerTagShift),
                                  IRB.getInt8Ty());
  Value *AddrLong = untagPointer(IRB, PtrLong);
  Value *Shadow = memToShadow(AddrLong, IRB);
  Value *MemTag = IRB.CreateLoad(Int8Ty, Shadow);
  Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);

  int matchAllTag = ClMatchAllTag.getNumOccurrences() > 0 ?
      ClMatchAllTag : (CompileKernel ? 0xFF : -1);
  if (matchAllTag != -1) {
    Value *TagNotIgnored = IRB.CreateICmpNE(PtrTag,
        ConstantInt::get(PtrTag->getType(), matchAllTag));
    TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
  }

  Instruction *CheckTerm =
      SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, !Recover,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
  InlineAsm *Asm;
  switch (TargetTriple.getArch()) {
    case Triple::x86_64:
      // The signal handler will find the data address in rdi.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "int3\nnopl " + itostr(0x40 + AccessInfo) + "(%rax)",
          "{rdi}",
          /*hasSideEffects=*/true);
      break;
    case Triple::aarch64:
    case Triple::aarch64_be:
      // The signal handler will find the data address in x0.
      Asm = InlineAsm::get(
          FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
          "brk #" + itostr(0x900 + AccessInfo),
          "{x0}",
          /*hasSideEffects=*/true);
      break;
    default:
      report_fatal_error("unsupported architecture");
  }
  IRB.CreateCall(Asm, PtrLong);
}

void HWAddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  IRBuilder<> IRB(MI);
  if (isa<MemTransferInst>(MI)) {
    IRB.CreateCall(
        isa<MemMoveInst>(MI) ? HWAsanMemmove : HWAsanMemcpy,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreatePointerCast(MI->getOperand(1), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  } else if (isa<MemSetInst>(MI)) {
    IRB.CreateCall(
        HWAsanMemset,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  }
  MI->eraseFromParent();
}

bool HWAddressSanitizer::instrumentMemAccess(Instruction *I) {
  LLVM_DEBUG(dbgs() << "Instrumenting: " << *I << "\n");
  bool IsWrite = false;
  unsigned Alignment = 0;
  uint64_t TypeSize = 0;
  Value *MaybeMask = nullptr;

  if (ClInstrumentMemIntrinsics && isa<MemIntrinsic>(I)) {
    instrumentMemIntrinsic(cast<MemIntrinsic>(I));
    return true;
  }

  Value *Addr =
      isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);

  if (!Addr)
    return false;

  if (MaybeMask)
    return false; // FIXME

  IRBuilder<> IRB(I);
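  // Power-of-two accesses of at most 16 bytes with sufficient alignment get a
  // single-granule inline or callback check; everything else falls back to the
  // sized ("N") callback.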
  if (isPowerOf2_64(TypeSize) &&
      (TypeSize / 8 <= (1UL << (kNumberOfAccessSizes - 1))) &&
      (Alignment >= (1UL << Mapping.Scale) || Alignment == 0 ||
       Alignment >= TypeSize / 8)) {
    size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
    if (ClInstrumentWithCalls) {
      IRB.CreateCall(HwasanMemoryAccessCallback[IsWrite][AccessSizeIndex],
                     IRB.CreatePointerCast(Addr, IntptrTy));
    } else {
      instrumentMemAccessInline(Addr, IsWrite, AccessSizeIndex, I);
    }
  } else {
    IRB.CreateCall(HwasanMemoryAccessCallbackSized[IsWrite],
                   {IRB.CreatePointerCast(Addr, IntptrTy),
                    ConstantInt::get(IntptrTy, TypeSize / 8)});
  }
  untagPointerOperand(I, Addr);

  return true;
}

static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
  uint64_t ArraySize = 1;
  if (AI.isArrayAllocation()) {
    const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
    assert(CI && "non-constant array size");
    ArraySize = CI->getZExtValue();
  }
  Type *Ty = AI.getAllocatedType();
  uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
  return SizeInBytes * ArraySize;
}

bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
                                   Value *Tag) {
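  // Round the object size up to the shadow granule so the whole alloca,
  // including its padding, carries one uniform tag.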
  size_t Size = (getAllocaSizeInBytes(*AI) + Mapping.getAllocaAlignment() - 1) &
                ~(Mapping.getAllocaAlignment() - 1);

  Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
  if (ClInstrumentWithCalls) {
    IRB.CreateCall(HwasanTagMemoryFunc,
                   {IRB.CreatePointerCast(AI, Int8PtrTy), JustTag,
                    ConstantInt::get(IntptrTy, Size)});
  } else {
    size_t ShadowSize = Size >> Mapping.Scale;
    Value *ShadowPtr = memToShadow(IRB.CreatePointerCast(AI, IntptrTy), IRB);
    // If this memset is not inlined, it will be intercepted in the hwasan
    // runtime library. That's OK, because the interceptor skips the checks if
    // the address is in the shadow region.
    // FIXME: the interceptor is not as fast as real memset. Consider lowering
    // llvm.memset right here into either a sequence of stores, or a call to
    // hwasan_tag_memory.
    IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, /*Align=*/1);
  }
  return true;
}

static unsigned RetagMask(unsigned AllocaNo) {
  // A list of 8-bit numbers that have at most one run of non-zero bits.
  // x = x ^ (mask << 56) can be encoded as a single armv8 instruction for these
  // masks.
  // The list does not include the value 255, which is used for UAR.
  //
  // Because we are more likely to use earlier elements of this list than later
  // ones, it is sorted in increasing order of probability of collision with a
  // mask allocated (temporally) nearby. The program that generated this list
  // can be found at:
  // https://github.com/google/sanitizers/blob/master/hwaddress-sanitizer/sort_masks.py
  static unsigned FastMasks[] = {0, 128, 64, 192, 32, 96, 224, 112, 240,
                                 48, 16, 120, 248, 56, 24, 8, 124, 252,
                                 60, 28, 12, 4, 126, 254, 62, 30, 14,
                                 6, 2, 127, 63, 31, 15, 7, 3, 1};
  return FastMasks[AllocaNo % (sizeof(FastMasks) / sizeof(FastMasks[0]))];
}

Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
  return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
}

Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  if (StackBaseTag)
    return StackBaseTag;
  // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
  // first).
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  auto GetStackPointerFn =
      Intrinsic::getDeclaration(M, Intrinsic::frameaddress);
  Value *StackPointer = IRB.CreateCall(
      GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});

  // Extract some entropy from the stack pointer for the tags.
  // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
  // between functions).
  Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
  Value *StackTag =
      IRB.CreateXor(StackPointerLong, IRB.CreateLShr(StackPointerLong, 20),
                    "hwasan.stack.base.tag");
  return StackTag;
}

Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
                                        AllocaInst *AI, unsigned AllocaNo) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag,
                       ConstantInt::get(IntptrTy, RetagMask(AllocaNo)));
}

Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
  if (ClUARRetagToZero)
    return ConstantInt::get(IntptrTy, 0);
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, 0xFFU));
}

// Add a tag to an address.
Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
                                      Value *PtrLong, Value *Tag) {
  Value *TaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    Value *ShiftedTag = IRB.CreateOr(
        IRB.CreateShl(Tag, kPointerTagShift),
        ConstantInt::get(IntptrTy, (1ULL << kPointerTagShift) - 1));
    TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
  } else {
    // Userspace can simply do OR (tag << 56);
    Value *ShiftedTag = IRB.CreateShl(Tag, kPointerTagShift);
    TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
  }
  return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
}

// Remove tag from an address.
Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
  Value *UntaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    UntaggedPtrLong = IRB.CreateOr(PtrLong,
        ConstantInt::get(PtrLong->getType(), 0xFFULL << kPointerTagShift));
  } else {
    // Userspace addresses have 0x00.
    UntaggedPtrLong = IRB.CreateAnd(PtrLong,
        ConstantInt::get(PtrLong->getType(), ~(0xFFULL << kPointerTagShift)));
  }
  return UntaggedPtrLong;
}

Value *HWAddressSanitizer::getHwasanThreadSlotPtr(IRBuilder<> &IRB, Type *Ty) {
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  if (TargetTriple.isAArch64() && TargetTriple.isAndroid()) {
    // Android provides a fixed TLS slot for sanitizers. See TLS_SLOT_SANITIZER
    // in Bionic's libc/private/bionic_tls.h.
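    // The slot sits 0x30 bytes past the thread pointer (slot index 6 at
    // 8 bytes per slot in Bionic's TLS layout), hence the constant offset
    // below.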
    Function *ThreadPointerFunc =
        Intrinsic::getDeclaration(M, Intrinsic::thread_pointer);
    Value *SlotPtr = IRB.CreatePointerCast(
        IRB.CreateConstGEP1_32(IRB.getInt8Ty(),
                               IRB.CreateCall(ThreadPointerFunc), 0x30),
        Ty->getPointerTo(0));
    return SlotPtr;
  }
  if (ThreadPtrGlobal)
    return ThreadPtrGlobal;

  return nullptr;
}

void HWAddressSanitizer::emitPrologue(IRBuilder<> &IRB, bool WithFrameRecord) {
  if (!Mapping.InTls) {
    LocalDynamicShadow = getDynamicShadowNonTls(IRB);
    return;
  }

  if (!WithFrameRecord && TargetTriple.isAndroid()) {
    LocalDynamicShadow = getDynamicShadowIfunc(IRB);
    return;
  }

  Value *SlotPtr = getHwasanThreadSlotPtr(IRB, IntptrTy);
  assert(SlotPtr);

  Instruction *ThreadLong = IRB.CreateLoad(IntptrTy, SlotPtr);

  Function *F = IRB.GetInsertBlock()->getParent();
  if (F->getFnAttribute("hwasan-abi").getValueAsString() == "interceptor") {
    Value *ThreadLongEqZero =
        IRB.CreateICmpEQ(ThreadLong, ConstantInt::get(IntptrTy, 0));
    auto *Br = cast<BranchInst>(SplitBlockAndInsertIfThen(
        ThreadLongEqZero, cast<Instruction>(ThreadLongEqZero)->getNextNode(),
        false, MDBuilder(*C).createBranchWeights(1, 100000)));

    IRB.SetInsertPoint(Br);
    // FIXME: This should call a new runtime function with a custom calling
    // convention to avoid needing to spill all arguments here.
    IRB.CreateCall(HwasanThreadEnterFunc);
    LoadInst *ReloadThreadLong = IRB.CreateLoad(IntptrTy, SlotPtr);

    IRB.SetInsertPoint(&*Br->getSuccessor(0)->begin());
    PHINode *ThreadLongPhi = IRB.CreatePHI(IntptrTy, 2);
    ThreadLongPhi->addIncoming(ThreadLong, ThreadLong->getParent());
    ThreadLongPhi->addIncoming(ReloadThreadLong, ReloadThreadLong->getParent());
    ThreadLong = ThreadLongPhi;
  }

  // Extract the address field from ThreadLong. Unnecessary on AArch64 with TBI.
  Value *ThreadLongMaybeUntagged =
      TargetTriple.isAArch64() ? ThreadLong : untagPointer(IRB, ThreadLong);

  if (WithFrameRecord) {
    StackBaseTag = IRB.CreateAShr(ThreadLong, 3);

    // Prepare ring buffer data.
    Value *PC;
    if (TargetTriple.getArch() == Triple::aarch64)
      PC = readRegister(IRB, "pc");
    else
      PC = IRB.CreatePtrToInt(F, IntptrTy);
    auto GetStackPointerFn =
        Intrinsic::getDeclaration(F->getParent(), Intrinsic::frameaddress);
    Value *SP = IRB.CreatePtrToInt(
        IRB.CreateCall(GetStackPointerFn,
                       {Constant::getNullValue(IRB.getInt32Ty())}),
        IntptrTy);
    // Mix SP and PC.
    // Assumptions:
    // PC is 0x0000PPPPPPPPPPPP (48 bits are meaningful, others are zero)
    // SP is 0xsssssssssssSSSS0 (4 lower bits are zero)
    // We only really need ~20 lower non-zero bits (SSSS), so we mix like this:
    // 0xSSSSPPPPPPPPPPPP
    SP = IRB.CreateShl(SP, 44);

    // Store data to ring buffer.
    Value *RecordPtr =
        IRB.CreateIntToPtr(ThreadLongMaybeUntagged, IntptrTy->getPointerTo(0));
    IRB.CreateStore(IRB.CreateOr(PC, SP), RecordPtr);

    // Update the ring buffer. Top byte of ThreadLong defines the size of the
    // buffer in pages, it must be a power of two, and the start of the buffer
    // must be aligned by twice that much. Therefore wrap around of the ring
    // buffer is simply Addr &= ~((ThreadLong >> 56) << 12).
    // The use of AShr instead of LShr is due to
    // https://bugs.llvm.org/show_bug.cgi?id=39030
    // Runtime library makes sure not to use the highest bit.
    Value *WrapMask = IRB.CreateXor(
        IRB.CreateShl(IRB.CreateAShr(ThreadLong, 56), 12, "", true, true),
        ConstantInt::get(IntptrTy, (uint64_t)-1));
    Value *ThreadLongNew = IRB.CreateAnd(
        IRB.CreateAdd(ThreadLong, ConstantInt::get(IntptrTy, 8)), WrapMask);
    IRB.CreateStore(ThreadLongNew, SlotPtr);
  }

  // Get shadow base address by aligning RecordPtr up.
  // Note: this is not correct if the pointer is already aligned.
  // Runtime library will make sure this never happens.
  LocalDynamicShadow = IRB.CreateAdd(
      IRB.CreateOr(
          ThreadLongMaybeUntagged,
          ConstantInt::get(IntptrTy, (1ULL << kShadowBaseAlignment) - 1)),
      ConstantInt::get(IntptrTy, 1), "hwasan.shadow");
  LocalDynamicShadow = IRB.CreateIntToPtr(LocalDynamicShadow, Int8PtrTy);
}

Value *HWAddressSanitizer::readRegister(IRBuilder<> &IRB, StringRef Name) {
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  Function *ReadRegister =
      Intrinsic::getDeclaration(M, Intrinsic::read_register, IntptrTy);
  MDNode *MD = MDNode::get(*C, {MDString::get(*C, Name)});
  Value *Args[] = {MetadataAsValue::get(*C, MD)};
  return IRB.CreateCall(ReadRegister, Args);
}

bool HWAddressSanitizer::instrumentLandingPads(
    SmallVectorImpl<Instruction *> &LandingPadVec) {
  for (auto *LP : LandingPadVec) {
    IRBuilder<> IRB(LP->getNextNode());
    IRB.CreateCall(
        HWAsanHandleVfork,
        {readRegister(IRB, (TargetTriple.getArch() == Triple::x86_64) ? "rsp"
                                                                      : "sp")});
  }
  return true;
}

bool HWAddressSanitizer::instrumentStack(
    SmallVectorImpl<AllocaInst *> &Allocas,
    DenseMap<AllocaInst *, std::vector<DbgDeclareInst *>> &AllocaDeclareMap,
    SmallVectorImpl<Instruction *> &RetVec, Value *StackTag) {
  // Ideally, we want to calculate a tagged stack base pointer and rewrite all
  // alloca addresses using that. Unfortunately, offsets are not known yet
  // (unless we use ASan-style mega-alloca). Instead we keep the base tag in a
  // temp, shift-OR it into each alloca address and xor with the retag mask.
  // This generates one extra instruction per alloca use.
  for (unsigned N = 0; N < Allocas.size(); ++N) {
    auto *AI = Allocas[N];
    IRBuilder<> IRB(AI->getNextNode());

    // Replace uses of the alloca with the tagged address.
    Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
    Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
    Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
    std::string Name =
        AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
    Replacement->setName(Name + ".hwasan");

    for (auto UI = AI->use_begin(), UE = AI->use_end(); UI != UE;) {
      Use &U = *UI++;
      if (U.getUser() != AILong)
        U.set(Replacement);
    }

    for (auto *DDI : AllocaDeclareMap.lookup(AI)) {
      DIExpression *OldExpr = DDI->getExpression();
      DIExpression *NewExpr = DIExpression::append(
          OldExpr, {dwarf::DW_OP_LLVM_tag_offset, RetagMask(N)});
      DDI->setArgOperand(2, MetadataAsValue::get(*C, NewExpr));
    }

    tagAlloca(IRB, AI, Tag);

    for (auto RI : RetVec) {
      IRB.SetInsertPoint(RI);

      // Re-tag alloca memory with the special UAR tag.
      Value *Tag = getUARTag(IRB, StackTag);
      tagAlloca(IRB, AI, Tag);
    }
  }

  return true;
}

bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  return (AI.getAllocatedType()->isSized() &&
          // FIXME: instrument dynamic allocas, too
          AI.isStaticAlloca() &&
          // alloca() may be called with 0 size, ignore it.
          getAllocaSizeInBytes(AI) > 0 &&
          // We are only interested in allocas not promotable to registers.
          // Promotable allocas are common under -O0.
          !isAllocaPromotable(&AI) &&
          // inalloca allocas are not treated as static, and we don't want
          // dynamic alloca instrumentation for them as well.
          !AI.isUsedWithInAlloca() &&
          // swifterror allocas are register promoted by ISel
          !AI.isSwiftError());
}

bool HWAddressSanitizer::sanitizeFunction(Function &F) {
  if (&F == HwasanCtorFunction)
    return false;

  if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
    return false;

  LLVM_DEBUG(dbgs() << "Function: " << F.getName() << "\n");

  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<AllocaInst*, 8> AllocasToInstrument;
  SmallVector<Instruction*, 8> RetVec;
  SmallVector<Instruction*, 8> LandingPadVec;
  DenseMap<AllocaInst *, std::vector<DbgDeclareInst *>> AllocaDeclareMap;
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (ClInstrumentStack)
        if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
          // Realign all allocas. We don't want small uninteresting allocas to
          // hide in instrumented alloca's padding.
          if (AI->getAlignment() < Mapping.getAllocaAlignment())
            AI->setAlignment(Mapping.getAllocaAlignment());
          // Instrument some of them.
          if (isInterestingAlloca(*AI))
            AllocasToInstrument.push_back(AI);
          continue;
        }

      if (isa<ReturnInst>(Inst) || isa<ResumeInst>(Inst) ||
          isa<CleanupReturnInst>(Inst))
        RetVec.push_back(&Inst);

      if (auto *DDI = dyn_cast<DbgDeclareInst>(&Inst))
        if (auto *Alloca = dyn_cast_or_null<AllocaInst>(DDI->getAddress()))
          AllocaDeclareMap[Alloca].push_back(DDI);

      if (ClInstrumentLandingPads && isa<LandingPadInst>(Inst))
        LandingPadVec.push_back(&Inst);

      Value *MaybeMask = nullptr;
      bool IsWrite;
      unsigned Alignment;
      uint64_t TypeSize;
      Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
                                              &Alignment, &MaybeMask);
      if (Addr || isa<MemIntrinsic>(Inst))
        ToInstrument.push_back(&Inst);
    }
  }

  initializeCallbacks(*F.getParent());

  if (!LandingPadVec.empty())
    instrumentLandingPads(LandingPadVec);

  if (AllocasToInstrument.empty() && ToInstrument.empty())
    return false;

  assert(!LocalDynamicShadow);

  Instruction *InsertPt = &*F.getEntryBlock().begin();
  IRBuilder<> EntryIRB(InsertPt);
  emitPrologue(EntryIRB,
               /*WithFrameRecord*/ ClRecordStackHistory &&
                   !AllocasToInstrument.empty());

  bool Changed = false;
  if (!AllocasToInstrument.empty()) {
    Value *StackTag =
        ClGenerateTagsWithCalls ? nullptr : getStackBaseTag(EntryIRB);
    Changed |= instrumentStack(AllocasToInstrument, AllocaDeclareMap, RetVec,
                               StackTag);
  }

  // If we split the entry block, move any allocas that were originally in the
  // entry block back into the entry block so that they aren't treated as
  // dynamic allocas.
  if (EntryIRB.GetInsertBlock() != &F.getEntryBlock()) {
    InsertPt = &*F.getEntryBlock().begin();
    for (auto II = EntryIRB.GetInsertBlock()->begin(),
              IE = EntryIRB.GetInsertBlock()->end();
         II != IE;) {
      Instruction *I = &*II++;
      if (auto *AI = dyn_cast<AllocaInst>(I))
        if (isa<ConstantInt>(AI->getArraySize()))
          I->moveBefore(InsertPt);
    }
  }

  for (auto Inst : ToInstrument)
    Changed |= instrumentMemAccess(Inst);

  LocalDynamicShadow = nullptr;
  StackBaseTag = nullptr;

  return Changed;
}

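// Decide how the shadow base is located: an explicit -hwasan-mapping-offset
// wins; kernel or callback-based instrumentation assumes a zero offset;
// otherwise the base is found dynamically, via the ifunc, the TLS slot, or the
// __hwasan_shadow_memory_dynamic_address global.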
void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple) {
  Scale = kDefaultShadowScale;
  if (ClMappingOffset.getNumOccurrences() > 0) {
    InGlobal = false;
    InTls = false;
    Offset = ClMappingOffset;
  } else if (ClEnableKhwasan || ClInstrumentWithCalls) {
    InGlobal = false;
    InTls = false;
    Offset = 0;
  } else if (ClWithIfunc) {
    InGlobal = true;
    InTls = false;
    Offset = kDynamicShadowSentinel;
  } else if (ClWithTls) {
    InGlobal = false;
    InTls = true;
    Offset = kDynamicShadowSentinel;
  } else {
    InGlobal = false;
    InTls = false;
    Offset = kDynamicShadowSentinel;
  }
}