//===- HWAddressSanitizer.cpp - detector of memory bugs ------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file is a part of HWAddressSanitizer, an address sanity checker
/// based on tagged addressing.
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
#include <limits>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "hwasan"

static const char *const kHwasanModuleCtorName = "hwasan.module_ctor";
static const char *const kHwasanInitName = "__hwasan_init";

static const char *const kHwasanShadowMemoryDynamicAddress =
    "__hwasan_shadow_memory_dynamic_address";

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

static const size_t kDefaultShadowScale = 4;
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();
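// The tag lives in the top byte of the pointer (bits 56..63). On AArch64 the
// hardware's top-byte-ignore (TBI) feature lets tagged pointers be
// dereferenced directly; other targets must strip the tag before the access
// (see untagPointerOperand).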
static const unsigned kPointerTagShift = 56;

static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "hwasan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__hwasan_"));

static cl::opt<bool>
    ClInstrumentWithCalls("hwasan-instrument-with-calls",
                          cl::desc("instrument reads and writes with callbacks"),
                          cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentWrites(
    "hwasan-instrument-writes", cl::desc("instrument write instructions"),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "hwasan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClRecover(
    "hwasan-recover",
    cl::desc("Enable recovery mode (continue-after-error)."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
                                       cl::desc("instrument stack (allocas)"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClUARRetagToZero(
    "hwasan-uar-retag-to-zero",
    cl::desc("Clear alloca tags before returning from the function to allow "
             "mixing instrumented and non-instrumented code. When set to "
             "false, allocas are retagged before returning from the "
             "function to detect use after return."),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClGenerateTagsWithCalls(
    "hwasan-generate-tags-with-calls",
    cl::desc("generate new tags with runtime library calls"), cl::Hidden,
    cl::init(false));

static cl::opt<int> ClMatchAllTag(
    "hwasan-match-all-tag",
    cl::desc("don't report bad accesses via pointers with this tag"),
    cl::Hidden, cl::init(-1));

static cl::opt<bool> ClEnableKhwasan(
    "hwasan-kernel",
    cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
    cl::Hidden, cl::init(false));

// These flags allow changing the shadow mapping and control how shadow memory
// is accessed. The shadow mapping looks like:
//    Shadow = (Mem >> scale) + offset

static cl::opt<unsigned long long> ClMappingOffset(
    "hwasan-mapping-offset",
    cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"), cl::Hidden,
    cl::init(0));

static cl::opt<bool>
    ClWithIfunc("hwasan-with-ifunc",
                cl::desc("Access dynamic shadow through an ifunc global on "
                         "platforms that support this"),
                cl::Hidden, cl::init(false));

namespace {

/// An instrumentation pass implementing detection of addressability bugs
/// using tagged pointers.
class HWAddressSanitizer : public FunctionPass {
public:
  // Pass identification, replacement for typeid.
  static char ID;

  explicit HWAddressSanitizer(bool CompileKernel = false, bool Recover = false)
      : FunctionPass(ID) {
    this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
    this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0 ?
        ClEnableKhwasan : CompileKernel;
  }

  StringRef getPassName() const override { return "HWAddressSanitizer"; }

  bool runOnFunction(Function &F) override;
  bool doInitialization(Module &M) override;

  void initializeCallbacks(Module &M);

  void maybeInsertDynamicShadowAtFunctionEntry(Function &F);

  void untagPointerOperand(Instruction *I, Value *Addr);
  Value *memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB);
  void instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                 unsigned AccessSizeIndex,
                                 Instruction *InsertBefore);
  bool instrumentMemAccess(Instruction *I);
  Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite,
                                   uint64_t *TypeSize, unsigned *Alignment,
                                   Value **MaybeMask);

  bool isInterestingAlloca(const AllocaInst &AI);
  bool tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag);
  Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
  Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
  bool instrumentStack(SmallVectorImpl<AllocaInst *> &Allocas,
                       SmallVectorImpl<Instruction *> &RetVec);
  Value *getNextTagWithCall(IRBuilder<> &IRB);
  Value *getStackBaseTag(IRBuilder<> &IRB);
  Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
                      unsigned AllocaNo);
  Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);

private:
  LLVMContext *C;
  Triple TargetTriple;

  /// This struct defines the shadow mapping using the rule:
  ///   shadow = (mem >> Scale) + Offset.
  /// If InGlobal is true, then
  ///   extern char __hwasan_shadow[];
  ///   shadow = (mem >> Scale) + &__hwasan_shadow
  struct ShadowMapping {
    int Scale;
    uint64_t Offset;
    bool InGlobal;

    void init(Triple &TargetTriple);
    unsigned getAllocaAlignment() const { return 1U << Scale; }
  };
  ShadowMapping Mapping;

  Type *IntptrTy;
  Type *Int8PtrTy;
  Type *Int8Ty;

  bool CompileKernel;
  bool Recover;

  Function *HwasanCtorFunction;

  Function *HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
  Function *HwasanMemoryAccessCallbackSized[2];

  Function *HwasanTagMemoryFunc;
  Function *HwasanGenerateTagFunc;

  Constant *ShadowGlobal;

  Value *LocalDynamicShadow = nullptr;
};

} // end anonymous namespace

char HWAddressSanitizer::ID = 0;

INITIALIZE_PASS_BEGIN(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
INITIALIZE_PASS_END(
    HWAddressSanitizer, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)

FunctionPass *llvm::createHWAddressSanitizerPass(bool CompileKernel,
                                                 bool Recover) {
  assert(!CompileKernel || Recover);
  return new HWAddressSanitizer(CompileKernel, Recover);
}

/// Module-level initialization.
///
/// Inserts a call to __hwasan_init into the module's constructor list.
bool HWAddressSanitizer::doInitialization(Module &M) {
  LLVM_DEBUG(dbgs() << "Init " << M.getName() << "\n");
  auto &DL = M.getDataLayout();

  TargetTriple = Triple(M.getTargetTriple());

  Mapping.init(TargetTriple);

  C = &(M.getContext());
  IRBuilder<> IRB(*C);
  IntptrTy = IRB.getIntPtrTy(DL);
  Int8PtrTy = IRB.getInt8PtrTy();
  Int8Ty = IRB.getInt8Ty();

  HwasanCtorFunction = nullptr;
  if (!CompileKernel) {
    std::tie(HwasanCtorFunction, std::ignore) =
        createSanitizerCtorAndInitFunctions(M, kHwasanModuleCtorName,
                                            kHwasanInitName,
                                            /*InitArgTypes=*/{},
                                            /*InitArgs=*/{});
    appendToGlobalCtors(M, HwasanCtorFunction, 0);
  }
  return true;
}

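// Declares the __hwasan_* runtime entry points used by the instrumentation:
// per-size and variable-size load/store check callbacks, the memory tagging
// helper, the tag generator, and (when ifunc-based shadow is used) the
// __hwasan_shadow global.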
void HWAddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    const std::string TypeStr = AccessIsWrite ? "store" : "load";
    const std::string EndingStr = Recover ? "_noabort" : "";

    HwasanMemoryAccessCallbackSized[AccessIsWrite] =
        checkSanitizerInterfaceFunction(M.getOrInsertFunction(
            ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr,
            FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false)));

    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
          checkSanitizerInterfaceFunction(M.getOrInsertFunction(
              ClMemoryAccessCallbackPrefix + TypeStr +
                  itostr(1ULL << AccessSizeIndex) + EndingStr,
              FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false)));
    }
  }

  HwasanTagMemoryFunc = checkSanitizerInterfaceFunction(M.getOrInsertFunction(
      "__hwasan_tag_memory", IRB.getVoidTy(), Int8PtrTy, Int8Ty, IntptrTy));
  HwasanGenerateTagFunc = checkSanitizerInterfaceFunction(
      M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty));

  if (Mapping.InGlobal)
    ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow",
                                       ArrayType::get(IRB.getInt8Ty(), 0));
}

void HWAddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
  // Generate code only when dynamic addressing is needed.
  if (Mapping.Offset != kDynamicShadowSentinel)
    return;

  IRBuilder<> IRB(&F.front().front());
  if (Mapping.InGlobal) {
    // An empty inline asm with input reg == output reg.
    // An opaque pointer-to-int cast, basically.
    InlineAsm *Asm = InlineAsm::get(
        FunctionType::get(IntptrTy, {ShadowGlobal->getType()}, false),
        StringRef(""), StringRef("=r,0"),
        /*hasSideEffects=*/false);
    LocalDynamicShadow = IRB.CreateCall(Asm, {ShadowGlobal}, ".hwasan.shadow");
  } else {
    Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
        kHwasanShadowMemoryDynamicAddress, IntptrTy);
    LocalDynamicShadow = IRB.CreateLoad(GlobalDynamicAddress);
  }
}

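// If I is a memory access that should be checked, returns its pointer operand
// and fills in IsWrite, TypeSize (in bits), and Alignment; otherwise returns
// nullptr. Accesses in non-zero address spaces and swifterror slots are
// skipped.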
Value *HWAddressSanitizer::isInterestingMemoryAccess(Instruction *I,
                                                     bool *IsWrite,
                                                     uint64_t *TypeSize,
                                                     unsigned *Alignment,
                                                     Value **MaybeMask) {
  // Skip memory accesses inserted by another instrumentation.
  if (I->getMetadata("nosanitize")) return nullptr;

  // Do not instrument the load fetching the dynamic shadow address.
  if (LocalDynamicShadow == I)
    return nullptr;

  Value *PtrOperand = nullptr;
  const DataLayout &DL = I->getModule()->getDataLayout();
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return nullptr;
    *IsWrite = false;
    *TypeSize = DL.getTypeStoreSizeInBits(LI->getType());
    *Alignment = LI->getAlignment();
    PtrOperand = LI->getPointerOperand();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(SI->getValueOperand()->getType());
    *Alignment = SI->getAlignment();
    PtrOperand = SI->getPointerOperand();
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(RMW->getValOperand()->getType());
    *Alignment = 0;
    PtrOperand = RMW->getPointerOperand();
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return nullptr;
    *IsWrite = true;
    *TypeSize = DL.getTypeStoreSizeInBits(XCHG->getCompareOperand()->getType());
    *Alignment = 0;
    PtrOperand = XCHG->getPointerOperand();
  }

  if (PtrOperand) {
    // Do not instrument accesses from different address spaces; we cannot deal
    // with them.
    Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType());
    if (PtrTy->getPointerAddressSpace() != 0)
      return nullptr;

    // Ignore swifterror addresses.
    // swifterror memory addresses are mem2reg promoted by instruction
    // selection. As such they cannot have regular uses like an instrumentation
    // function and it makes no sense to track them as memory.
    if (PtrOperand->isSwiftError())
      return nullptr;
  }

  return PtrOperand;
}

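// Returns the operand index of the pointer operand for the supported memory
// instructions, so it can be replaced with an untagged copy of the address.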
static unsigned getPointerOperandIndex(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerOperandIndex();
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperandIndex();
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
    return RMW->getPointerOperandIndex();
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
    return XCHG->getPointerOperandIndex();
  report_fatal_error("Unexpected instruction");
  return -1;
}

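// Maps an access size in bits (8, 16, 32, 64, 128) to an index into the
// per-size callback arrays: log2 of the size in bytes.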
static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}

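// On targets without hardware top-byte-ignore (i.e. everything but AArch64
// here), the actual access must go through an untagged copy of the pointer,
// so rewrite the instruction's pointer operand.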
void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
  if (TargetTriple.isAArch64())
    return;

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *UntaggedPtr =
      IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
  I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
}

Value *HWAddressSanitizer::memToShadow(Value *Mem, Type *Ty, IRBuilder<> &IRB) {
  // Mem >> Scale
  Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
  if (Mapping.Offset == 0)
    return Shadow;
  // (Mem >> Scale) + Offset
  Value *ShadowBase;
  if (LocalDynamicShadow)
    ShadowBase = LocalDynamicShadow;
  else
    ShadowBase = ConstantInt::get(Ty, Mapping.Offset);
  return IRB.CreateAdd(Shadow, ShadowBase);
}

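// Emits the inline tag check: compare the pointer's top-byte tag with the
// shadow byte for the address and, on mismatch (unless the pointer carries
// the match-all tag), trap via a target-specific instruction (brk on AArch64,
// int3 on x86_64) whose immediate/operand encodes the access kind and size
// for the runtime's signal handler.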
void HWAddressSanitizer::instrumentMemAccessInline(Value *PtrLong, bool IsWrite,
                                                   unsigned AccessSizeIndex,
                                                   Instruction *InsertBefore) {
  IRBuilder<> IRB(InsertBefore);
  Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, kPointerTagShift),
                                  IRB.getInt8Ty());
  Value *AddrLong = untagPointer(IRB, PtrLong);
  Value *ShadowLong = memToShadow(AddrLong, PtrLong->getType(), IRB);
  Value *MemTag = IRB.CreateLoad(IRB.CreateIntToPtr(ShadowLong, Int8PtrTy));
  Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);

  int matchAllTag = ClMatchAllTag.getNumOccurrences() > 0 ?
      ClMatchAllTag : (CompileKernel ? 0xFF : -1);
  if (matchAllTag != -1) {
    Value *TagNotIgnored = IRB.CreateICmpNE(PtrTag,
        ConstantInt::get(PtrTag->getType(), matchAllTag));
    TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
  }

  TerminatorInst *CheckTerm =
      SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, !Recover,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
  const int64_t AccessInfo = Recover * 0x20 + IsWrite * 0x10 + AccessSizeIndex;
  InlineAsm *Asm;
  switch (TargetTriple.getArch()) {
  case Triple::x86_64:
    // The signal handler will find the data address in rdi.
    Asm = InlineAsm::get(
        FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
        "int3\nnopl " + itostr(0x40 + AccessInfo) + "(%rax)",
        "{rdi}",
        /*hasSideEffects=*/true);
    break;
  case Triple::aarch64:
  case Triple::aarch64_be:
    // The signal handler will find the data address in x0.
    Asm = InlineAsm::get(
        FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
        "brk #" + itostr(0x900 + AccessInfo),
        "{x0}",
        /*hasSideEffects=*/true);
    break;
  default:
    report_fatal_error("unsupported architecture");
  }
  IRB.CreateCall(Asm, PtrLong);
}

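// Instruments a single memory access: power-of-two sizes up to 16 bytes with
// sufficient alignment get the inline check (or a fixed-size callback under
// -hwasan-instrument-with-calls); everything else goes through the
// variable-size __hwasan_load/storeN callback.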
bool HWAddressSanitizer::instrumentMemAccess(Instruction *I) {
  LLVM_DEBUG(dbgs() << "Instrumenting: " << *I << "\n");
  bool IsWrite = false;
  unsigned Alignment = 0;
  uint64_t TypeSize = 0;
  Value *MaybeMask = nullptr;
  Value *Addr =
      isInterestingMemoryAccess(I, &IsWrite, &TypeSize, &Alignment, &MaybeMask);

  if (!Addr)
    return false;

  if (MaybeMask)
    return false; // FIXME
  
  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  if (isPowerOf2_64(TypeSize) &&
      (TypeSize / 8 <= (1UL << (kNumberOfAccessSizes - 1))) &&
      (Alignment >= (1UL << Mapping.Scale) || Alignment == 0 ||
       Alignment >= TypeSize / 8)) {
    size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
    if (ClInstrumentWithCalls) {
      IRB.CreateCall(HwasanMemoryAccessCallback[IsWrite][AccessSizeIndex],
                     AddrLong);
    } else {
      instrumentMemAccessInline(AddrLong, IsWrite, AccessSizeIndex, I);
    }
  } else {
    IRB.CreateCall(HwasanMemoryAccessCallbackSized[IsWrite],
                   {AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8)});
  }
  untagPointerOperand(I, Addr);

  return true;
}

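// Returns the static size of an alloca in bytes: element type size times the
// (constant) array size, if any.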
static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
  uint64_t ArraySize = 1;
  if (AI.isArrayAllocation()) {
    const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
    assert(CI && "non-constant array size");
    ArraySize = CI->getZExtValue();
  }
  Type *Ty = AI.getAllocatedType();
  uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
  return SizeInBytes * ArraySize;
}

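// Sets the shadow bytes covering the alloca (rounded up to the shadow
// granularity) to the given tag, either through the __hwasan_tag_memory
// runtime call or by storing the tag directly into the shadow with a memset.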
bool HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI,
                                   Value *Tag) {
  size_t Size = (getAllocaSizeInBytes(*AI) + Mapping.getAllocaAlignment() - 1) &
                ~(Mapping.getAllocaAlignment() - 1);

  Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
  if (ClInstrumentWithCalls) {
    IRB.CreateCall(HwasanTagMemoryFunc,
                   {IRB.CreatePointerCast(AI, Int8PtrTy), JustTag,
                    ConstantInt::get(IntptrTy, Size)});
  } else {
    size_t ShadowSize = Size >> Mapping.Scale;
    Value *ShadowPtr = IRB.CreateIntToPtr(
        memToShadow(IRB.CreatePointerCast(AI, IntptrTy), AI->getType(), IRB),
        Int8PtrTy);
    // If this memset is not inlined, it will be intercepted in the hwasan
    // runtime library. That's OK, because the interceptor skips the checks if
    // the address is in the shadow region.
    // FIXME: the interceptor is not as fast as real memset. Consider lowering
    // llvm.memset right here into either a sequence of stores, or a call to
    // hwasan_tag_memory.
    IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, /*Align=*/1);
  }
  return true;
}

static unsigned RetagMask(unsigned AllocaNo) {
  // A list of 8-bit numbers that have at most one run of non-zero bits.
  // x = x ^ (mask << 56) can be encoded as a single armv8 instruction for these
  // masks.
  // The list does not include the value 255, which is used for UAR.
  static unsigned FastMasks[] = {
      0, 1, 2, 3, 4, 6, 7, 8, 12, 14, 15, 16, 24,
      28, 30, 31, 32, 48, 56, 60, 62, 63, 64, 96, 112, 120,
      124, 126, 127, 128, 192, 224, 240, 248, 252, 254};
  return FastMasks[AllocaNo % (sizeof(FastMasks) / sizeof(FastMasks[0]))];
}

Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
  return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
}

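// Derives a per-function base tag for stack allocas from the frame address:
// bits with ASLR entropy XORed with low bits that differ between frames.
// Returns nullptr when tags come from the runtime instead.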
Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
  if (ClGenerateTagsWithCalls)
    return nullptr;
  // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
  // first).
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  auto GetStackPointerFn =
      Intrinsic::getDeclaration(M, Intrinsic::frameaddress);
  Value *StackPointer = IRB.CreateCall(
      GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});

  // Extract some entropy from the stack pointer for the tags.
  // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
  // between functions).
  Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
  Value *StackTag =
      IRB.CreateXor(StackPointerLong, IRB.CreateLShr(StackPointerLong, 20),
                    "hwasan.stack.base.tag");
  return StackTag;
}

Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
                                        AllocaInst *AI, unsigned AllocaNo) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag,
                       ConstantInt::get(IntptrTy, RetagMask(AllocaNo)));
}

Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
  if (ClUARRetagToZero)
    return ConstantInt::get(IntptrTy, 0);
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, 0xFFU));
}

// Add a tag to an address.
Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
                                      Value *PtrLong, Value *Tag) {
  Value *TaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    Value *ShiftedTag = IRB.CreateOr(
        IRB.CreateShl(Tag, kPointerTagShift),
        ConstantInt::get(IntptrTy, (1ULL << kPointerTagShift) - 1));
    TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
  } else {
    // Userspace pointers can simply be ORed with (tag << kPointerTagShift).
    Value *ShiftedTag = IRB.CreateShl(Tag, kPointerTagShift);
    TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
  }
  return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
}

// Remove tag from an address.
Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
  Value *UntaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    UntaggedPtrLong = IRB.CreateOr(PtrLong,
        ConstantInt::get(PtrLong->getType(), 0xFFULL << kPointerTagShift));
  } else {
    // Userspace addresses have 0x00.
    UntaggedPtrLong = IRB.CreateAnd(PtrLong,
        ConstantInt::get(PtrLong->getType(), ~(0xFFULL << kPointerTagShift)));
  }
  return UntaggedPtrLong;
}

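// Tags the stack: assigns each interesting alloca a tag derived from the
// per-function base tag, rewrites the alloca's uses to a tagged pointer, tags
// its shadow at the point of allocation, and re-tags the shadow with the UAR
// tag before every return.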
bool HWAddressSanitizer::instrumentStack(
    SmallVectorImpl<AllocaInst *> &Allocas,
    SmallVectorImpl<Instruction *> &RetVec) {
  Function *F = Allocas[0]->getParent()->getParent();
  Instruction *InsertPt = &*F->getEntryBlock().begin();
  IRBuilder<> IRB(InsertPt);

  Value *StackTag = getStackBaseTag(IRB);

  // Ideally, we want to compute a tagged stack base pointer and rewrite all
  // alloca addresses using that. Unfortunately, offsets are not known yet
  // (unless we use ASan-style mega-alloca). Instead we keep the base tag in a
  // temp, shift-OR it into each alloca address and XOR with the retag mask.
  // This generates one extra instruction per alloca use.
  for (unsigned N = 0; N < Allocas.size(); ++N) {
    auto *AI = Allocas[N];
    IRB.SetInsertPoint(AI->getNextNode());

    // Replace uses of the alloca with the tagged address.
    Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
    Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
    Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
    std::string Name =
        AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
    Replacement->setName(Name + ".hwasan");

    for (auto UI = AI->use_begin(), UE = AI->use_end(); UI != UE;) {
      Use &U = *UI++;
      if (U.getUser() != AILong)
        U.set(Replacement);
    }

    tagAlloca(IRB, AI, Tag);

    for (auto RI : RetVec) {
      IRB.SetInsertPoint(RI);

      // Re-tag alloca memory with the special UAR tag.
      Value *Tag = getUARTag(IRB, StackTag);
      tagAlloca(IRB, AI, Tag);
    }
  }

  return true;
}

bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  return (AI.getAllocatedType()->isSized() &&
          // FIXME: instrument dynamic allocas, too
          AI.isStaticAlloca() &&
          // alloca() may be called with 0 size, ignore it.
          getAllocaSizeInBytes(AI) > 0 &&
          // We are only interested in allocas not promotable to registers.
          // Promotable allocas are common under -O0.
          !isAllocaPromotable(&AI) &&
          // inalloca allocas are not treated as static, and we don't want
          // dynamic alloca instrumentation for them as well.
          !AI.isUsedWithInAlloca() &&
          // swifterror allocas are register promoted by ISel
          !AI.isSwiftError());
}

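// Per-function driver: collects interesting allocas (realigning them to the
// shadow granularity), return-like instructions, and memory accesses, then
// instruments the stack and each access.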
bool HWAddressSanitizer::runOnFunction(Function &F) {
  if (&F == HwasanCtorFunction)
    return false;

  if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
    return false;

  LLVM_DEBUG(dbgs() << "Function: " << F.getName() << "\n");

  initializeCallbacks(*F.getParent());

  assert(!LocalDynamicShadow);
  maybeInsertDynamicShadowAtFunctionEntry(F);

  bool Changed = false;
  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<AllocaInst*, 8> AllocasToInstrument;
  SmallVector<Instruction*, 8> RetVec;
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (ClInstrumentStack)
        if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
          // Realign all allocas. We don't want small uninteresting allocas to
          // hide in an instrumented alloca's padding.
          if (AI->getAlignment() < Mapping.getAllocaAlignment())
            AI->setAlignment(Mapping.getAllocaAlignment());
          // Instrument some of them.
          if (isInterestingAlloca(*AI))
            AllocasToInstrument.push_back(AI);
          continue;
        }

      if (isa<ReturnInst>(Inst) || isa<ResumeInst>(Inst) ||
          isa<CleanupReturnInst>(Inst))
        RetVec.push_back(&Inst);

      Value *MaybeMask = nullptr;
      bool IsWrite;
      unsigned Alignment;
      uint64_t TypeSize;
      Value *Addr = isInterestingMemoryAccess(&Inst, &IsWrite, &TypeSize,
                                              &Alignment, &MaybeMask);
      if (Addr || isa<MemIntrinsic>(Inst))
        ToInstrument.push_back(&Inst);
    }
  }

  if (!AllocasToInstrument.empty())
    Changed |= instrumentStack(AllocasToInstrument, RetVec);

  for (auto Inst : ToInstrument)
    Changed |= instrumentMemAccess(Inst);

  LocalDynamicShadow = nullptr;

  return Changed;
}

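// Chooses how shadow memory is addressed: a fixed offset if one was given on
// the command line, offset 0 for the kernel or callback-based instrumentation,
// the ifunc-resolved __hwasan_shadow global where supported (Android >= 21 by
// default), and otherwise a dynamic offset loaded from
// __hwasan_shadow_memory_dynamic_address.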
void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple) {
  const bool IsAndroid = TargetTriple.isAndroid();
  const bool IsAndroidWithIfuncSupport =
      IsAndroid && !TargetTriple.isAndroidVersionLT(21);

  Scale = kDefaultShadowScale;
  const bool WithIfunc = ClWithIfunc.getNumOccurrences() > 0
                             ? ClWithIfunc
                             : IsAndroidWithIfuncSupport;

  if (ClMappingOffset.getNumOccurrences() > 0) {
    InGlobal = false;
    Offset = ClMappingOffset;
  } else if (ClEnableKhwasan || ClInstrumentWithCalls) {
    InGlobal = false;
    Offset = 0;
  } else if (WithIfunc) {
    InGlobal = true;
    Offset = kDynamicShadowSentinel;
  } else {
    InGlobal = false;
    Offset = kDynamicShadowSentinel;
  }
}