//===-- AddressSanitizer.cpp - memory error detector ------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address sanity checker.
// Details of the algorithm:
//  http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "asan"

#include "llvm/Transforms/Instrumentation.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/Triple.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/DataTypes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Endian.h"
#include "llvm/Support/system_error.h"
#include "llvm/Transforms/Utils/ASanStackFrameLayout.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/SpecialCaseList.h"
#include <algorithm>
#include <string>

using namespace llvm;
static const uint64_t kDefaultShadowScale = 3;
static const uint64_t kDefaultShadowOffset32 = 1ULL << 29;
static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
static const uint64_t kSmallX86_64ShadowOffset = 0x7FFF8000;  // < 2G.
static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 41;
static const uint64_t kMIPS32_ShadowOffset32 = 0x0aaa8000;
static const uint64_t kFreeBSD_ShadowOffset32 = 1ULL << 30;
static const uint64_t kFreeBSD_ShadowOffset64 = 1ULL << 46;

static const size_t kMinStackMallocSize = 1 << 6;   // 64B
static const size_t kMaxStackMallocSize = 1 << 16;  // 64K
static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;
static const char *const kAsanModuleCtorName = "asan.module_ctor";
static const char *const kAsanModuleDtorName = "asan.module_dtor";
static const int kAsanCtorAndDtorPriority = 1;
static const char *const kAsanReportErrorTemplate = "__asan_report_";
static const char *const kAsanReportLoadN = "__asan_report_load_n";
static const char *const kAsanReportStoreN = "__asan_report_store_n";
static const char *const kAsanRegisterGlobalsName = "__asan_register_globals";
static const char *const kAsanUnregisterGlobalsName =
    "__asan_unregister_globals";
static const char *const kAsanPoisonGlobalsName = "__asan_before_dynamic_init";
static const char *const kAsanUnpoisonGlobalsName = "__asan_after_dynamic_init";
static const char *const kAsanInitName = "__asan_init_v3";
static const char *const kAsanCovName = "__sanitizer_cov";
static const char *const kAsanPtrCmp = "__sanitizer_ptr_cmp";
static const char *const kAsanPtrSub = "__sanitizer_ptr_sub";
static const char *const kAsanHandleNoReturnName = "__asan_handle_no_return";
static const int kMaxAsanStackMallocSizeClass = 10;
static const char *const kAsanStackMallocNameTemplate = "__asan_stack_malloc_";
static const char *const kAsanStackFreeNameTemplate = "__asan_stack_free_";
static const char *const kAsanGenPrefix = "__asan_gen_";
static const char *const kAsanPoisonStackMemoryName =
    "__asan_poison_stack_memory";
static const char *const kAsanUnpoisonStackMemoryName =
    "__asan_unpoison_stack_memory";

static const char *const kAsanOptionDetectUAR =
    "__asan_option_detect_stack_use_after_return";

// This constant must match the definition in the run-time library.
static const int kAsanStackAfterReturnMagic = 0xf5;
// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;
// Command-line flags.

// This flag may need to be replaced with -f[no-]asan-reads.
static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
       cl::desc("instrument read instructions"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInstrumentWrites("asan-instrument-writes",
       cl::desc("instrument write instructions"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInstrumentAtomics("asan-instrument-atomics",
       cl::desc("instrument atomic instructions (rmw, cmpxchg)"),
       cl::Hidden, cl::init(true));
static cl::opt<bool> ClAlwaysSlowPath("asan-always-slow-path",
       cl::desc("use instrumentation with slow path for all accesses"),
       cl::Hidden, cl::init(false));
// This flag limits the number of instructions to be instrumented
// in any given BB. Normally, this should be set to unlimited (INT_MAX),
// but due to http://llvm.org/bugs/show_bug.cgi?id=12652 we temporarily
// set it to 10000.
static cl::opt<int> ClMaxInsnsToInstrumentPerBB("asan-max-ins-per-bb",
       cl::init(10000),
       cl::desc("maximal number of instructions to instrument in any given BB"),
       cl::Hidden);
// This flag may need to be replaced with -f[no]asan-stack.
static cl::opt<bool> ClStack("asan-stack",
       cl::desc("Handle stack memory"), cl::Hidden, cl::init(true));
// This flag may need to be replaced with -f[no]asan-use-after-return.
static cl::opt<bool> ClUseAfterReturn("asan-use-after-return",
       cl::desc("Check return-after-free"), cl::Hidden, cl::init(false));
// This flag may need to be replaced with -f[no]asan-globals.
static cl::opt<bool> ClGlobals("asan-globals",
       cl::desc("Handle global objects"), cl::Hidden, cl::init(true));
static cl::opt<int> ClCoverage("asan-coverage",
       cl::desc("ASan coverage. 0: none, 1: entry block, 2: all blocks"),
       cl::Hidden, cl::init(0));
static cl::opt<int> ClCoverageBlockThreshold("asan-coverage-block-threshold",
       cl::desc("Add coverage instrumentation only to the entry block if there "
                "are more than this number of blocks."),
       cl::Hidden, cl::init(1500));
static cl::opt<bool> ClInitializers("asan-initialization-order",
       cl::desc("Handle C++ initializer order"), cl::Hidden, cl::init(false));
static cl::opt<bool> ClMemIntrin("asan-memintrin",
       cl::desc("Handle memset/memcpy/memmove"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClInvalidPointerPairs("asan-detect-invalid-pointer-pair",
       cl::desc("Instrument <, <=, >, >=, - with pointer operands"),
       cl::Hidden, cl::init(false));
static cl::opt<unsigned> ClRealignStack("asan-realign-stack",
       cl::desc("Realign stack to the value of this flag (power of two)"),
       cl::Hidden, cl::init(32));
static cl::opt<std::string> ClBlacklistFile("asan-blacklist",
       cl::desc("File containing the list of objects to ignore "
                "during instrumentation"), cl::Hidden);
static cl::opt<int> ClInstrumentationWithCallsThreshold(
    "asan-instrumentation-with-call-threshold",
    cl::desc("If the function being instrumented contains more than "
             "this number of memory accesses, use callbacks instead of "
             "inline checks (-1 means never use callbacks)."),
    cl::Hidden, cl::init(-1));
static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "asan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__asan_"));
// This is an experimental feature that will allow choosing between
// instrumented and non-instrumented code at link-time.
// If this option is on, just before instrumenting a function we create its
// clone; if the function is not changed by asan the clone is deleted.
// If we end up with a clone, we put the instrumented function into a section
// called "ASAN" and the uninstrumented function into a section called "NOASAN".
//
// This is still a prototype, we need to figure out a way to keep two copies of
// a function so that the linker can easily choose one of them.
static cl::opt<bool> ClKeepUninstrumented("asan-keep-uninstrumented-functions",
       cl::desc("Keep uninstrumented copies of functions"),
       cl::Hidden, cl::init(false));
// These flags allow changing the shadow mapping.
// The shadow mapping looks like
//   Shadow = (Mem >> scale) + (1 << offset_log)
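// For example, with the default scale of 3 and a 32-bit offset of 1 << 29,
// the 8 application bytes at 0x12345670..0x12345677 all map to the single
// shadow byte at (0x12345670 >> 3) + (1 << 29) == 0x22468ace. (Illustrative
// arithmetic; the offset actually used is chosen per-target in
// getShadowMapping below.)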
static cl::opt<int> ClMappingScale("asan-mapping-scale",
       cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0));
// Optimization flags. Not user visible, used mostly for testing
// and benchmarking the tool.
static cl::opt<bool> ClOpt("asan-opt",
       cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClOptSameTemp("asan-opt-same-temp",
       cl::desc("Instrument the same temp just once"), cl::Hidden,
       cl::init(true));
static cl::opt<bool> ClOptGlobals("asan-opt-globals",
       cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true));
static cl::opt<bool> ClCheckLifetime("asan-check-lifetime",
       cl::desc("Use llvm.lifetime intrinsics to insert extra checks"),
       cl::Hidden, cl::init(false));

// Debug flags.
static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden,
                            cl::init(0));
static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"),
                                 cl::Hidden, cl::init(0));
static cl::opt<std::string> ClDebugFunc("asan-debug-func",
                                        cl::Hidden, cl::desc("Debug func"));
static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"),
                               cl::Hidden, cl::init(-1));
static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug max inst"),
                               cl::Hidden, cl::init(-1));
STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
STATISTIC(NumOptimizedAccessesToGlobalArray,
          "Number of optimized accesses to global arrays");
STATISTIC(NumOptimizedAccessesToGlobalVar,
          "Number of optimized accesses to global vars");

namespace {
/// A set of dynamically initialized globals extracted from metadata.
class SetOfDynamicallyInitializedGlobals {
 public:
  void Init(Module& M) {
    // Clang generates metadata identifying all dynamically initialized globals.
    NamedMDNode *DynamicGlobals =
        M.getNamedMetadata("llvm.asan.dynamically_initialized_globals");
    if (!DynamicGlobals)
      return;
    for (int i = 0, n = DynamicGlobals->getNumOperands(); i < n; ++i) {
      MDNode *MDN = DynamicGlobals->getOperand(i);
      assert(MDN->getNumOperands() == 1);
      Value *VG = MDN->getOperand(0);
      // The optimizer may optimize away a global entirely, in which case we
      // cannot instrument access to it.
      if (!VG)
        continue;
      DynInitGlobals.insert(cast<GlobalVariable>(VG));
    }
  }
  bool Contains(GlobalVariable *G) { return DynInitGlobals.count(G) != 0; }

 private:
  SmallSet<GlobalValue*, 32> DynInitGlobals;
};
/// This struct defines the shadow mapping using the rule:
///   shadow = (mem >> Scale) ADD-or-OR Offset.
struct ShadowMapping {
  int Scale;
  uint64_t Offset;
  bool OrShadowOffset;
};
static ShadowMapping getShadowMapping(const Module &M, int LongSize) {
  llvm::Triple TargetTriple(M.getTargetTriple());
  bool IsAndroid = TargetTriple.getEnvironment() == llvm::Triple::Android;
  // bool IsMacOSX = TargetTriple.getOS() == llvm::Triple::MacOSX;
  bool IsFreeBSD = TargetTriple.getOS() == llvm::Triple::FreeBSD;
  bool IsLinux = TargetTriple.getOS() == llvm::Triple::Linux;
  bool IsPPC64 = TargetTriple.getArch() == llvm::Triple::ppc64 ||
                 TargetTriple.getArch() == llvm::Triple::ppc64le;
  bool IsX86_64 = TargetTriple.getArch() == llvm::Triple::x86_64;
  bool IsMIPS32 = TargetTriple.getArch() == llvm::Triple::mips ||
                  TargetTriple.getArch() == llvm::Triple::mipsel;
  ShadowMapping Mapping;

  if (LongSize == 32) {
    if (IsAndroid)
      Mapping.Offset = 0;
    else if (IsMIPS32)
      Mapping.Offset = kMIPS32_ShadowOffset32;
    else if (IsFreeBSD)
      Mapping.Offset = kFreeBSD_ShadowOffset32;
    else
      Mapping.Offset = kDefaultShadowOffset32;
  } else {  // LongSize == 64
    if (IsPPC64)
      Mapping.Offset = kPPC64_ShadowOffset64;
    else if (IsFreeBSD)
      Mapping.Offset = kFreeBSD_ShadowOffset64;
    else if (IsLinux && IsX86_64)
      Mapping.Offset = kSmallX86_64ShadowOffset;
    else
      Mapping.Offset = kDefaultShadowOffset64;
  }

  Mapping.Scale = kDefaultShadowScale;
  if (ClMappingScale) {
    Mapping.Scale = ClMappingScale;
  }
  // OR-ing shadow offset is more efficient (at least on x86) if the offset
  // is a power of two, but on ppc64 we have to use add since the shadow
  // offset is not necessarily 1/8-th of the address space.
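  // For example, kDefaultShadowOffset64 == 1ULL << 44 has a single bit set,
  // so (Offset & (Offset - 1)) == 0 and the cheaper OR form is used, whereas
  // kSmallX86_64ShadowOffset == 0x7FFF8000 is not a power of two and keeps
  // the ADD form.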
  Mapping.OrShadowOffset = !IsPPC64 && !(Mapping.Offset & (Mapping.Offset - 1));

  return Mapping;
}
static size_t RedzoneSizeForScale(int MappingScale) {
  // Redzone used for stack and globals is at least 32 bytes.
  // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
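  // E.g. scale 3 gives std::max(32U, 1U << 3) == 32 while scale 7 gives
  // std::max(32U, 1U << 7) == 128.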
  return std::max(32U, 1U << MappingScale);
}
/// AddressSanitizer: instrument the code in module to find memory bugs.
struct AddressSanitizer : public FunctionPass {
  AddressSanitizer(bool CheckInitOrder = true,
                   bool CheckUseAfterReturn = false,
                   bool CheckLifetime = false,
                   StringRef BlacklistFile = StringRef())
      : FunctionPass(ID),
        CheckInitOrder(CheckInitOrder || ClInitializers),
        CheckUseAfterReturn(CheckUseAfterReturn || ClUseAfterReturn),
        CheckLifetime(CheckLifetime || ClCheckLifetime),
        BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
                                            : BlacklistFile) {}
  const char *getPassName() const override {
    return "AddressSanitizerFunctionPass";
  }
  void instrumentMop(Instruction *I, bool UseCalls);
  void instrumentPointerComparisonOrSubtraction(Instruction *I);
  void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
                         Value *Addr, uint32_t TypeSize, bool IsWrite,
                         Value *SizeArgument, bool UseCalls);
  Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
                           Value *ShadowValue, uint32_t TypeSize);
  Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
                                 bool IsWrite, size_t AccessSizeIndex,
                                 Value *SizeArgument);
  bool instrumentMemIntrinsic(MemIntrinsic *MI, bool UseCalls);
  void instrumentMemIntrinsicParam(Instruction *OrigIns, Value *Addr,
                                   Value *Size, Instruction *InsertBefore,
                                   bool IsWrite, bool UseCalls);
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  bool runOnFunction(Function &F) override;
  bool maybeInsertAsanInitAtFunctionEntry(Function &F);
  bool doInitialization(Module &M) override;
  static char ID;  // Pass identification, replacement for typeid

 private:
  void initializeCallbacks(Module &M);

  bool LooksLikeCodeInBug11395(Instruction *I);
  bool GlobalIsLinkerInitialized(GlobalVariable *G);
  bool InjectCoverage(Function &F, const ArrayRef<BasicBlock*> AllBlocks);
  void InjectCoverageAtBlock(Function &F, BasicBlock &BB);
  bool CheckInitOrder;
  bool CheckUseAfterReturn;
  bool CheckLifetime;
  SmallString<64> BlacklistFile;

  LLVMContext *C;
  const DataLayout *DL;
  int LongSize;
  Type *IntptrTy;
  ShadowMapping Mapping;
  Function *AsanCtorFunction;
  Function *AsanInitFunction;
  Function *AsanHandleNoReturnFunc;
  Function *AsanCovFunction;
  Function *AsanPtrCmpFunction, *AsanPtrSubFunction;
  std::unique_ptr<SpecialCaseList> BL;
  // This array is indexed by AccessIsWrite and log2(AccessSize).
  Function *AsanErrorCallback[2][kNumberOfAccessSizes];
  Function *AsanMemoryAccessCallback[2][kNumberOfAccessSizes];
  // This array is indexed by AccessIsWrite.
  Function *AsanErrorCallbackSized[2],
           *AsanMemoryAccessCallbackSized[2];
  InlineAsm *EmptyAsm;
  SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;

  friend struct FunctionStackPoisoner;
};
class AddressSanitizerModule : public ModulePass {
 public:
  AddressSanitizerModule(bool CheckInitOrder = true,
                         StringRef BlacklistFile = StringRef())
      : ModulePass(ID),
        CheckInitOrder(CheckInitOrder || ClInitializers),
        BlacklistFile(BlacklistFile.empty() ? ClBlacklistFile
                                            : BlacklistFile) {}
  bool runOnModule(Module &M) override;
  static char ID;  // Pass identification, replacement for typeid
  const char *getPassName() const override {
    return "AddressSanitizerModule";
  }

 private:
  void initializeCallbacks(Module &M);

  bool ShouldInstrumentGlobal(GlobalVariable *G);
  void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName);
  size_t MinRedzoneSizeForGlobal() const {
    return RedzoneSizeForScale(Mapping.Scale);
  }

  bool CheckInitOrder;
  SmallString<64> BlacklistFile;

  std::unique_ptr<SpecialCaseList> BL;
  SetOfDynamicallyInitializedGlobals DynamicallyInitializedGlobals;
  Type *IntptrTy;
  LLVMContext *C;
  const DataLayout *DL;
  ShadowMapping Mapping;
  Function *AsanPoisonGlobals;
  Function *AsanUnpoisonGlobals;
  Function *AsanRegisterGlobals;
  Function *AsanUnregisterGlobals;
};
// Stack poisoning does not play well with exception handling.
// When an exception is thrown, we essentially bypass the code
// that unpoisons the stack. This is why the run-time library has
// to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire
// stack in the interceptor. This however does not work inside the
// actual function which catches the exception. Most likely because the
// compiler hoists the load of the shadow value somewhere too high.
// This causes asan to report a non-existing bug on 453.povray.
// It sounds like an LLVM bug.
struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
  Function &F;
  AddressSanitizer &ASan;
  DIBuilder DIB;
  LLVMContext *C;
  Type *IntptrTy;
  Type *IntptrPtrTy;
  ShadowMapping Mapping;

  SmallVector<AllocaInst*, 16> AllocaVec;
  SmallVector<Instruction*, 8> RetVec;
  unsigned StackAlignment;

  Function *AsanStackMallocFunc[kMaxAsanStackMallocSizeClass + 1],
           *AsanStackFreeFunc[kMaxAsanStackMallocSizeClass + 1];
  Function *AsanPoisonStackMemoryFunc, *AsanUnpoisonStackMemoryFunc;

  // Stores a place and arguments of poisoning/unpoisoning call for alloca.
  struct AllocaPoisonCall {
    IntrinsicInst *InsBefore;
    AllocaInst *AI;
    uint64_t Size;
    bool DoPoison;
  };
  SmallVector<AllocaPoisonCall, 8> AllocaPoisonCallVec;

  // Maps Value to an AllocaInst from which the Value is originated.
  typedef DenseMap<Value*, AllocaInst*> AllocaForValueMapTy;
  AllocaForValueMapTy AllocaForValue;

  FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
      : F(F), ASan(ASan), DIB(*F.getParent()), C(ASan.C),
        IntptrTy(ASan.IntptrTy), IntptrPtrTy(PointerType::get(IntptrTy, 0)),
        Mapping(ASan.Mapping),
        StackAlignment(1 << Mapping.Scale) {}
  bool runOnFunction() {
    if (!ClStack) return false;
    // Collect alloca, ret, lifetime instructions etc.
    for (BasicBlock *BB : depth_first(&F.getEntryBlock()))
      visit(*BB);

    if (AllocaVec.empty()) return false;

    initializeCallbacks(*F.getParent());

    poisonStack();

    if (ClDebugStack) {
      DEBUG(dbgs() << F);
    }
    return true;
  }

  // Finds all static Alloca instructions and puts
  // poisoned red zones around all of them.
  // Then unpoison everything back before the function returns.
  void poisonStack();
  // ----------------------- Visitors.
  /// \brief Collect all Ret instructions.
  void visitReturnInst(ReturnInst &RI) {
    RetVec.push_back(&RI);
  }

  /// \brief Collect Alloca instructions we want (and can) handle.
  void visitAllocaInst(AllocaInst &AI) {
    if (!isInterestingAlloca(AI)) return;

    StackAlignment = std::max(StackAlignment, AI.getAlignment());
    AllocaVec.push_back(&AI);
  }
  /// \brief Collect lifetime intrinsic calls to check for use-after-scope
  /// errors.
  void visitIntrinsicInst(IntrinsicInst &II) {
    if (!ASan.CheckLifetime) return;
    Intrinsic::ID ID = II.getIntrinsicID();
    if (ID != Intrinsic::lifetime_start &&
        ID != Intrinsic::lifetime_end)
      return;
    // Found lifetime intrinsic, add ASan instrumentation if necessary.
    ConstantInt *Size = dyn_cast<ConstantInt>(II.getArgOperand(0));
    // If size argument is undefined, don't do anything.
    if (Size->isMinusOne()) return;
    // Check that size doesn't saturate uint64_t and can
    // be stored in IntptrTy.
    const uint64_t SizeValue = Size->getValue().getLimitedValue();
    if (SizeValue == ~0ULL ||
        !ConstantInt::isValueValidForType(IntptrTy, SizeValue))
      return;
    // Find alloca instruction that corresponds to llvm.lifetime argument.
    AllocaInst *AI = findAllocaForValue(II.getArgOperand(1));
    if (!AI) return;
    bool DoPoison = (ID == Intrinsic::lifetime_end);
    AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
    AllocaPoisonCallVec.push_back(APC);
  }
  // ---------------------- Helpers.
  void initializeCallbacks(Module &M);

  // Check if we want (and can) handle this alloca.
  bool isInterestingAlloca(AllocaInst &AI) const {
    return (!AI.isArrayAllocation() && AI.isStaticAlloca() &&
            AI.getAllocatedType()->isSized() &&
            // alloca() may be called with 0 size, ignore it.
            getAllocaSizeInBytes(&AI) > 0);
  }

  uint64_t getAllocaSizeInBytes(AllocaInst *AI) const {
    Type *Ty = AI->getAllocatedType();
    uint64_t SizeInBytes = ASan.DL->getTypeAllocSize(Ty);
    return SizeInBytes;
  }
  /// Finds alloca where the value comes from.
  AllocaInst *findAllocaForValue(Value *V);
  void poisonRedZones(const ArrayRef<uint8_t> ShadowBytes, IRBuilder<> &IRB,
                      Value *ShadowBase, bool DoPoison);
  void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);

  void SetShadowToStackAfterReturnInlined(IRBuilder<> &IRB, Value *ShadowBase,
                                          int Size);
};

}  // namespace
char AddressSanitizer::ID = 0;
INITIALIZE_PASS(AddressSanitizer, "asan",
    "AddressSanitizer: detects use-after-free and out-of-bounds bugs.",
    false, false)
FunctionPass *llvm::createAddressSanitizerFunctionPass(
    bool CheckInitOrder, bool CheckUseAfterReturn, bool CheckLifetime,
    StringRef BlacklistFile) {
  return new AddressSanitizer(CheckInitOrder, CheckUseAfterReturn,
                              CheckLifetime, BlacklistFile);
}
char AddressSanitizerModule::ID = 0;
INITIALIZE_PASS(AddressSanitizerModule, "asan-module",
    "AddressSanitizer: detects use-after-free and out-of-bounds bugs."
    "ModulePass", false, false)
ModulePass *llvm::createAddressSanitizerModulePass(
    bool CheckInitOrder, StringRef BlacklistFile) {
  return new AddressSanitizerModule(CheckInitOrder, BlacklistFile);
}
static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}
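// E.g. a 4-byte (32-bit) access yields countTrailingZeros(4) == 2 and thus
// selects the "4-byte" slot of the callback arrays declared above.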
// \brief Create a constant for Str so that we can pass it to the run-time lib.
static GlobalVariable *createPrivateGlobalForString(
    Module &M, StringRef Str, bool AllowMerging) {
  Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str);
  // We use private linkage for module-local strings. If they can be merged
  // with another one, we set the unnamed_addr attribute.
  GlobalVariable *GV =
      new GlobalVariable(M, StrConst->getType(), true,
                         GlobalValue::PrivateLinkage, StrConst, kAsanGenPrefix);
  if (AllowMerging)
    GV->setUnnamedAddr(true);
  GV->setAlignment(1);  // Strings may not be merged w/o setting align 1.
  return GV;
}
static bool GlobalWasGeneratedByAsan(GlobalVariable *G) {
  return G->getName().find(kAsanGenPrefix) == 0;
}
Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
  // Shadow >> scale
  Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
  if (Mapping.Offset == 0)
    return Shadow;
  // (Shadow >> scale) | offset
  if (Mapping.OrShadowOffset)
    return IRB.CreateOr(Shadow, ConstantInt::get(IntptrTy, Mapping.Offset));
  else
    return IRB.CreateAdd(Shadow, ConstantInt::get(IntptrTy, Mapping.Offset));
}
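// On x86_64 Linux (Scale == 3, Offset == 0x7fff8000, ADD form) memToShadow
// emits IR along the lines of:
//   %1 = lshr i64 %0, 3
//   %2 = add i64 %1, 2147450880
// (a sketch of the expected output, shown here for illustration).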
void AddressSanitizer::instrumentMemIntrinsicParam(Instruction *OrigIns,
                                                   Value *Addr, Value *Size,
                                                   Instruction *InsertBefore,
                                                   bool IsWrite, bool UseCalls) {
  IRBuilder<> IRB(InsertBefore);
  if (Size->getType() != IntptrTy)
    Size = IRB.CreateIntCast(Size, IntptrTy, false);
  // Check the first byte.
  instrumentAddress(OrigIns, InsertBefore, Addr, 8, IsWrite, Size, false);
  // Check the last byte.
  IRB.SetInsertPoint(InsertBefore);
  Value *SizeMinusOne = IRB.CreateSub(Size, ConstantInt::get(IntptrTy, 1));
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *AddrLast = IRB.CreateAdd(AddrLong, SizeMinusOne);
  instrumentAddress(OrigIns, InsertBefore, AddrLast, 8, IsWrite, Size, false);
}
// Instrument memset/memmove/memcpy
bool AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI, bool UseCalls) {
  Value *Dst = MI->getDest();
  MemTransferInst *MemTran = dyn_cast<MemTransferInst>(MI);
  Value *Src = MemTran ? MemTran->getSource() : 0;
  Value *Length = MI->getLength();

  Constant *ConstLength = dyn_cast<Constant>(Length);
  Instruction *InsertBefore = MI;
  if (ConstLength) {
    if (ConstLength->isNullValue()) return false;
  } else {
    // The size is not a constant so it could be zero -- check at run-time.
    IRBuilder<> IRB(InsertBefore);

    Value *Cmp = IRB.CreateICmpNE(Length,
                                  Constant::getNullValue(Length->getType()));
    InsertBefore = SplitBlockAndInsertIfThen(Cmp, InsertBefore, false);
  }

  instrumentMemIntrinsicParam(MI, Dst, Length, InsertBefore, true, UseCalls);
  if (Src)
    instrumentMemIntrinsicParam(MI, Src, Length, InsertBefore, false, UseCalls);
  return true;
}
// If I is an interesting memory access, return the PointerOperand
// and set IsWrite. Otherwise return NULL.
static Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads) return NULL;
    *IsWrite = false;
    return LI->getPointerOperand();
  }
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites) return NULL;
    *IsWrite = true;
    return SI->getPointerOperand();
  }
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics) return NULL;
    *IsWrite = true;
    return RMW->getPointerOperand();
  }
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics) return NULL;
    *IsWrite = true;
    return XCHG->getPointerOperand();
  }
  return NULL;
}
static bool isPointerOperand(Value *V) {
  return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
}
// This is a rough heuristic; it may cause both false positives and
// false negatives. The proper implementation requires cooperation with
// the frontend.
static bool isInterestingPointerComparisonOrSubtraction(Instruction *I) {
  if (ICmpInst *Cmp = dyn_cast<ICmpInst>(I)) {
    if (!Cmp->isRelational())
      return false;
  } else if (BinaryOperator *BO = dyn_cast<BinaryOperator>(I)) {
    if (BO->getOpcode() != Instruction::Sub)
      return false;
  } else {
    return false;
  }
  if (!isPointerOperand(I->getOperand(0)) ||
      !isPointerOperand(I->getOperand(1)))
    return false;
  return true;
}
bool AddressSanitizer::GlobalIsLinkerInitialized(GlobalVariable *G) {
  // If a global variable does not have dynamic initialization we don't
  // have to instrument it. However, if a global does not have initializer
  // at all, we assume it has dynamic initializer (in other TU).
  return G->hasInitializer() && !DynamicallyInitializedGlobals.Contains(G);
}
void
AddressSanitizer::instrumentPointerComparisonOrSubtraction(Instruction *I) {
  IRBuilder<> IRB(I);
  Function *F = isa<ICmpInst>(I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
  Value *Param[2] = {I->getOperand(0), I->getOperand(1)};
  for (int i = 0; i < 2; i++) {
    if (Param[i]->getType()->isPointerTy())
      Param[i] = IRB.CreatePointerCast(Param[i], IntptrTy);
  }
  IRB.CreateCall2(F, Param[0], Param[1]);
}
void AddressSanitizer::instrumentMop(Instruction *I, bool UseCalls) {
  bool IsWrite = false;
  Value *Addr = isInterestingMemoryAccess(I, &IsWrite);
  assert(Addr);
  if (ClOpt && ClOptGlobals) {
    if (GlobalVariable *G = dyn_cast<GlobalVariable>(Addr)) {
      // If initialization order checking is disabled, a simple access to a
      // dynamically initialized global is always valid.
      if (!CheckInitOrder || GlobalIsLinkerInitialized(G)) {
        NumOptimizedAccessesToGlobalVar++;
        return;
      }
    }
    ConstantExpr *CE = dyn_cast<ConstantExpr>(Addr);
    if (CE && CE->isGEPWithNoNotionalOverIndexing()) {
      if (GlobalVariable *G = dyn_cast<GlobalVariable>(CE->getOperand(0))) {
        if (CE->getOperand(1)->isNullValue() && GlobalIsLinkerInitialized(G)) {
          NumOptimizedAccessesToGlobalArray++;
          return;
        }
      }
    }
  }
  Type *OrigPtrTy = Addr->getType();
  Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();

  assert(OrigTy->isSized());
  uint32_t TypeSize = DL->getTypeStoreSizeInBits(OrigTy);

  assert((TypeSize % 8) == 0);

  if (IsWrite)
    NumInstrumentedWrites++;
  else
    NumInstrumentedReads++;

  // Instrument a 1-, 2-, 4-, 8-, or 16- byte access with one check.
  if (TypeSize == 8 || TypeSize == 16 ||
      TypeSize == 32 || TypeSize == 64 || TypeSize == 128)
    return instrumentAddress(I, I, Addr, TypeSize, IsWrite, 0, UseCalls);
  // Instrument unusual size (but still multiple of 8).
  // We can not do it with a single check, so we do 1-byte check for the first
  // and the last bytes. We call __asan_report_*_n(addr, real_size) to be able
  // to report the actual access size.
  IRBuilder<> IRB(I);
  Value *LastByte = IRB.CreateIntToPtr(
      IRB.CreateAdd(IRB.CreatePointerCast(Addr, IntptrTy),
                    ConstantInt::get(IntptrTy, TypeSize / 8 - 1)),
      OrigPtrTy);
  Value *Size = ConstantInt::get(IntptrTy, TypeSize / 8);
  instrumentAddress(I, I, Addr, 8, IsWrite, Size, false);
  instrumentAddress(I, I, LastByte, 8, IsWrite, Size, false);
}
// Validate the result of Module::getOrInsertFunction called for an interface
// function of AddressSanitizer. If the instrumented module defines a function
// with the same name, their prototypes must match, otherwise
// getOrInsertFunction returns a bitcast.
static Function *checkInterfaceFunction(Constant *FuncOrBitcast) {
  if (isa<Function>(FuncOrBitcast)) return cast<Function>(FuncOrBitcast);
  FuncOrBitcast->dump();
  report_fatal_error("trying to redefine an AddressSanitizer "
                     "interface function");
}
Instruction *AddressSanitizer::generateCrashCode(
    Instruction *InsertBefore, Value *Addr,
    bool IsWrite, size_t AccessSizeIndex, Value *SizeArgument) {
  IRBuilder<> IRB(InsertBefore);
  CallInst *Call = SizeArgument
      ? IRB.CreateCall2(AsanErrorCallbackSized[IsWrite], Addr, SizeArgument)
      : IRB.CreateCall(AsanErrorCallback[IsWrite][AccessSizeIndex], Addr);

  // We don't do Call->setDoesNotReturn() because the BB already has
  // UnreachableInst at the end.
  // This EmptyAsm is required to avoid callback merge.
  IRB.CreateCall(EmptyAsm);
  return Call;
}
Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
                                           Value *ShadowValue,
                                           uint32_t TypeSize) {
  size_t Granularity = 1 << Mapping.Scale;
  // Addr & (Granularity - 1)
  Value *LastAccessedByte = IRB.CreateAnd(
      AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
  // (Addr & (Granularity - 1)) + size - 1
  if (TypeSize / 8 > 1)
    LastAccessedByte = IRB.CreateAdd(
        LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1));
  // (uint8_t) ((Addr & (Granularity-1)) + size - 1)
  LastAccessedByte = IRB.CreateIntCast(
      LastAccessedByte, ShadowValue->getType(), false);
  // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue
  return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
}
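// Example of the slow-path math: with Granularity == 8, a shadow value of 6
// means only the first 6 bytes of the granule are addressable. A 4-byte
// access starting at (Addr & 7) == 2 has last-byte index 2 + 4 - 1 == 5, and
// 5 >= 6 is false, so it passes; starting at (Addr & 7) == 4 the index is 7
// and the comparison fires.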
void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
                                         Instruction *InsertBefore, Value *Addr,
                                         uint32_t TypeSize, bool IsWrite,
                                         Value *SizeArgument, bool UseCalls) {
  IRBuilder<> IRB(InsertBefore);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);

  if (UseCalls) {
    IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][AccessSizeIndex],
                   AddrLong);
    return;
  }

  Type *ShadowTy = IntegerType::get(
      *C, std::max(8U, TypeSize >> Mapping.Scale));
  Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);
  Value *ShadowPtr = memToShadow(AddrLong, IRB);
  Value *CmpVal = Constant::getNullValue(ShadowTy);
  Value *ShadowValue = IRB.CreateLoad(
      IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy));

  Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal);
  size_t Granularity = 1 << Mapping.Scale;
  TerminatorInst *CrashTerm = 0;

  if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
    TerminatorInst *CheckTerm =
        SplitBlockAndInsertIfThen(Cmp, InsertBefore, false);
    assert(dyn_cast<BranchInst>(CheckTerm)->isUnconditional());
    BasicBlock *NextBB = CheckTerm->getSuccessor(0);
    IRB.SetInsertPoint(CheckTerm);
    Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize);
    BasicBlock *CrashBlock =
        BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
    CrashTerm = new UnreachableInst(*C, CrashBlock);
    BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
    ReplaceInstWithInst(CheckTerm, NewTerm);
  } else {
    CrashTerm = SplitBlockAndInsertIfThen(Cmp, InsertBefore, true);
  }

  Instruction *Crash = generateCrashCode(
      CrashTerm, AddrLong, IsWrite, AccessSizeIndex, SizeArgument);
  Crash->setDebugLoc(OrigIns->getDebugLoc());
}
void AddressSanitizerModule::createInitializerPoisonCalls(
    Module &M, GlobalValue *ModuleName) {
  // We do all of our poisoning and unpoisoning within _GLOBAL__I_a.
  Function *GlobalInit = M.getFunction("_GLOBAL__I_a");
  // If that function is not present, this TU contains no globals, or they
  // have all been optimized away.
  if (!GlobalInit)
    return;

  // Set up the arguments to our poison/unpoison functions.
  IRBuilder<> IRB(GlobalInit->begin()->getFirstInsertionPt());

  // Add a call to poison all external globals before the given function starts.
  Value *ModuleNameAddr = ConstantExpr::getPointerCast(ModuleName, IntptrTy);
  IRB.CreateCall(AsanPoisonGlobals, ModuleNameAddr);

  // Add calls to unpoison all globals before each return instruction.
  for (Function::iterator I = GlobalInit->begin(), E = GlobalInit->end();
       I != E; ++I) {
    if (ReturnInst *RI = dyn_cast<ReturnInst>(I->getTerminator())) {
      CallInst::Create(AsanUnpoisonGlobals, "", RI);
    }
  }
}
bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) {
  Type *Ty = cast<PointerType>(G->getType())->getElementType();
  DEBUG(dbgs() << "GLOBAL: " << *G << "\n");

  if (BL->isIn(*G)) return false;
  if (!Ty->isSized()) return false;
  if (!G->hasInitializer()) return false;
  if (GlobalWasGeneratedByAsan(G)) return false;  // Our own global.
  // Touch only those globals that will not be defined in other modules.
  // Don't handle ODR type linkages since other modules may be built w/o asan.
  if (G->getLinkage() != GlobalVariable::ExternalLinkage &&
      G->getLinkage() != GlobalVariable::PrivateLinkage &&
      G->getLinkage() != GlobalVariable::InternalLinkage)
    return false;
  // Two problems with thread-locals:
  //   - The address of the main thread's copy can't be computed at link-time.
  //   - Need to poison all copies, not just the main thread's one.
  if (G->isThreadLocal())
    return false;
  // For now, just ignore this Global if the alignment is large.
  if (G->getAlignment() > MinRedzoneSizeForGlobal()) return false;

  // Ignore all the globals with the names starting with "\01L_OBJC_".
  // Many of those are put into the .cstring section. The linker compresses
  // that section by removing the spare \0s after the string terminator, so
  // our redzones get broken.
  if ((G->getName().find("\01L_OBJC_") == 0) ||
      (G->getName().find("\01l_OBJC_") == 0)) {
    DEBUG(dbgs() << "Ignoring \\01L_OBJC_* global: " << *G << "\n");
    return false;
  }

  if (G->hasSection()) {
    StringRef Section(G->getSection());
    // Ignore the globals from the __OBJC section. The ObjC runtime assumes
    // those conform to /usr/lib/objc/runtime.h, so we can't add redzones to
    // them.
    if ((Section.find("__OBJC,") == 0) ||
        (Section.find("__DATA, __objc_") == 0)) {
      DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G << "\n");
      return false;
    }
    // See http://code.google.com/p/address-sanitizer/issues/detail?id=32
    // Constant CFString instances are compiled in the following way:
    //  -- the string buffer is emitted into
    //     __TEXT,__cstring,cstring_literals
    //  -- the constant NSConstantString structure referencing that buffer
    //     is placed into __DATA,__cfstring
    // Therefore there's no point in placing redzones into __DATA,__cfstring.
    // Moreover, it causes the linker to crash on OS X 10.7
    if (Section.find("__DATA,__cfstring") == 0) {
      DEBUG(dbgs() << "Ignoring CFString: " << *G << "\n");
      return false;
    }
    // The linker merges the contents of cstring_literals and removes the
    // trailing zeroes.
    if (Section.find("__TEXT,__cstring,cstring_literals") == 0) {
      DEBUG(dbgs() << "Ignoring a cstring literal: " << *G << "\n");
      return false;
    }
    // Globals from llvm.metadata aren't emitted, do not instrument them.
    if (Section == "llvm.metadata") return false;
  }

  return true;
}
void AddressSanitizerModule::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  // Declare our poisoning and unpoisoning functions.
  AsanPoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy, NULL));
  AsanPoisonGlobals->setLinkage(Function::ExternalLinkage);
  AsanUnpoisonGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanUnpoisonGlobalsName, IRB.getVoidTy(), NULL));
  AsanUnpoisonGlobals->setLinkage(Function::ExternalLinkage);
  // Declare functions that register/unregister globals.
  AsanRegisterGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanRegisterGlobalsName, IRB.getVoidTy(),
      IntptrTy, IntptrTy, NULL));
  AsanRegisterGlobals->setLinkage(Function::ExternalLinkage);
  AsanUnregisterGlobals = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanUnregisterGlobalsName,
      IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanUnregisterGlobals->setLinkage(Function::ExternalLinkage);
}
// This function replaces all global variables with new variables that have
// trailing redzones. It also creates a function that poisons
// redzones and inserts this function into llvm.global_ctors.
bool AddressSanitizerModule::runOnModule(Module &M) {
  if (!ClGlobals) return false;

  DataLayoutPass *DLP = getAnalysisIfAvailable<DataLayoutPass>();
  if (!DLP)
    return false;
  DL = &DLP->getDataLayout();

  BL.reset(SpecialCaseList::createOrDie(BlacklistFile));
  if (BL->isIn(M)) return false;
  C = &(M.getContext());
  int LongSize = DL->getPointerSizeInBits();
  IntptrTy = Type::getIntNTy(*C, LongSize);
  Mapping = getShadowMapping(M, LongSize);
  initializeCallbacks(M);
  DynamicallyInitializedGlobals.Init(M);

  SmallVector<GlobalVariable *, 16> GlobalsToChange;

  for (Module::GlobalListType::iterator G = M.global_begin(),
       E = M.global_end(); G != E; ++G) {
    if (ShouldInstrumentGlobal(G))
      GlobalsToChange.push_back(G);
  }

  size_t n = GlobalsToChange.size();
  if (n == 0) return false;
  // A global is described by a structure
  //   size_t beg;
  //   size_t size;
  //   size_t size_with_redzone;
  //   const char *name;
  //   const char *module_name;
  //   size_t has_dynamic_init;
  // We initialize an array of such structures and pass it to a run-time call.
  StructType *GlobalStructTy = StructType::get(IntptrTy, IntptrTy,
                                               IntptrTy, IntptrTy,
                                               IntptrTy, IntptrTy, NULL);
  SmallVector<Constant *, 16> Initializers(n);

  Function *CtorFunc = M.getFunction(kAsanModuleCtorName);
  assert(CtorFunc);
  IRBuilder<> IRB(CtorFunc->getEntryBlock().getTerminator());

  bool HasDynamicallyInitializedGlobals = false;

  // We shouldn't merge same module names, as this string serves as unique
  // module ID in runtime.
  GlobalVariable *ModuleName = createPrivateGlobalForString(
      M, M.getModuleIdentifier(), /*AllowMerging*/false);
  for (size_t i = 0; i < n; i++) {
    static const uint64_t kMaxGlobalRedzone = 1 << 18;
    GlobalVariable *G = GlobalsToChange[i];
    PointerType *PtrTy = cast<PointerType>(G->getType());
    Type *Ty = PtrTy->getElementType();
    uint64_t SizeInBytes = DL->getTypeAllocSize(Ty);
    uint64_t MinRZ = MinRedzoneSizeForGlobal();
    // MinRZ <= RZ <= kMaxGlobalRedzone
    // and trying to make RZ to be ~ 1/4 of SizeInBytes.
    uint64_t RZ = std::max(MinRZ,
                           std::min(kMaxGlobalRedzone,
                                    (SizeInBytes / MinRZ / 4) * MinRZ));
    uint64_t RightRedzoneSize = RZ;
    // Round up to MinRZ
    if (SizeInBytes % MinRZ)
      RightRedzoneSize += MinRZ - (SizeInBytes % MinRZ);
    assert(((RightRedzoneSize + SizeInBytes) % MinRZ) == 0);
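    // E.g. a 105-byte global with MinRZ == 32: RZ == max(32, 0) == 32, the
    // round-up adds 32 - 105 % 32 == 23 more, so RightRedzoneSize == 55 and
    // the padded global occupies 160 bytes, a multiple of MinRZ.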
    Type *RightRedZoneTy = ArrayType::get(IRB.getInt8Ty(), RightRedzoneSize);
    // Determine whether this global should be poisoned in initialization.
    bool GlobalHasDynamicInitializer =
        DynamicallyInitializedGlobals.Contains(G);
    // Don't check initialization order if this global is blacklisted.
    GlobalHasDynamicInitializer &= !BL->isIn(*G, "init");

    StructType *NewTy = StructType::get(Ty, RightRedZoneTy, NULL);
    Constant *NewInitializer = ConstantStruct::get(
        NewTy, G->getInitializer(),
        Constant::getNullValue(RightRedZoneTy), NULL);

    GlobalVariable *Name =
        createPrivateGlobalForString(M, G->getName(), /*AllowMerging*/true);

    // Create a new global variable with enough space for a redzone.
    GlobalValue::LinkageTypes Linkage = G->getLinkage();
    if (G->isConstant() && Linkage == GlobalValue::PrivateLinkage)
      Linkage = GlobalValue::InternalLinkage;
    GlobalVariable *NewGlobal = new GlobalVariable(
        M, NewTy, G->isConstant(), Linkage,
        NewInitializer, "", G, G->getThreadLocalMode());
    NewGlobal->copyAttributesFrom(G);
    NewGlobal->setAlignment(MinRZ);

    Value *Indices2[2];
    Indices2[0] = IRB.getInt32(0);
    Indices2[1] = IRB.getInt32(0);

    G->replaceAllUsesWith(
        ConstantExpr::getGetElementPtr(NewGlobal, Indices2, true));
    NewGlobal->takeName(G);
    G->eraseFromParent();

    Initializers[i] = ConstantStruct::get(
        GlobalStructTy,
        ConstantExpr::getPointerCast(NewGlobal, IntptrTy),
        ConstantInt::get(IntptrTy, SizeInBytes),
        ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
        ConstantExpr::getPointerCast(Name, IntptrTy),
        ConstantExpr::getPointerCast(ModuleName, IntptrTy),
        ConstantInt::get(IntptrTy, GlobalHasDynamicInitializer),
        NULL);

    // Populate the first and last globals declared in this TU.
    if (CheckInitOrder && GlobalHasDynamicInitializer)
      HasDynamicallyInitializedGlobals = true;

    DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
  }
  ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n);
  GlobalVariable *AllGlobals = new GlobalVariable(
      M, ArrayOfGlobalStructTy, false, GlobalVariable::InternalLinkage,
      ConstantArray::get(ArrayOfGlobalStructTy, Initializers), "");

  // Create calls for poisoning before initializers run and unpoisoning after.
  if (CheckInitOrder && HasDynamicallyInitializedGlobals)
    createInitializerPoisonCalls(M, ModuleName);
  IRB.CreateCall2(AsanRegisterGlobals,
                  IRB.CreatePointerCast(AllGlobals, IntptrTy),
                  ConstantInt::get(IntptrTy, n));

  // We also need to unregister globals at the end, e.g. when a shared library
  // gets closed.
  Function *AsanDtorFunction = Function::Create(
      FunctionType::get(Type::getVoidTy(*C), false),
      GlobalValue::InternalLinkage, kAsanModuleDtorName, &M);
  BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction);
  IRBuilder<> IRB_Dtor(ReturnInst::Create(*C, AsanDtorBB));
  IRB_Dtor.CreateCall2(AsanUnregisterGlobals,
                       IRB.CreatePointerCast(AllGlobals, IntptrTy),
                       ConstantInt::get(IntptrTy, n));
  appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndDtorPriority);

  return true;
}
void AddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  // Create __asan_report* callbacks.
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      // IsWrite and TypeSize are encoded in the function name.
      std::string Suffix =
          (AccessIsWrite ? "store" : "load") + itostr(1 << AccessSizeIndex);
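      // E.g. AccessIsWrite == 1 and AccessSizeIndex == 2 yield the names
      // "__asan_report_store4" and (with the default prefix) "__asan_store4".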
      AsanErrorCallback[AccessIsWrite][AccessSizeIndex] =
          checkInterfaceFunction(
              M.getOrInsertFunction(kAsanReportErrorTemplate + Suffix,
                                    IRB.getVoidTy(), IntptrTy, NULL));
      AsanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
          checkInterfaceFunction(
              M.getOrInsertFunction(ClMemoryAccessCallbackPrefix + Suffix,
                                    IRB.getVoidTy(), IntptrTy, NULL));
    }
  }
  AsanErrorCallbackSized[0] = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanReportLoadN, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanErrorCallbackSized[1] = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanReportStoreN, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));

  AsanMemoryAccessCallbackSized[0] = checkInterfaceFunction(
      M.getOrInsertFunction(ClMemoryAccessCallbackPrefix + "loadN",
                            IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanMemoryAccessCallbackSized[1] = checkInterfaceFunction(
      M.getOrInsertFunction(ClMemoryAccessCallbackPrefix + "storeN",
                            IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));

  AsanHandleNoReturnFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanHandleNoReturnName, IRB.getVoidTy(), NULL));
  AsanCovFunction = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanCovName, IRB.getVoidTy(), NULL));
  AsanPtrCmpFunction = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanPtrCmp, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanPtrSubFunction = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanPtrSub, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  // We insert an empty inline asm after __asan_report* to avoid callback merge.
  EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
                            StringRef(""), StringRef(""),
                            /*hasSideEffects=*/true);
}
bool AddressSanitizer::doInitialization(Module &M) {
  // Initialize the private fields. No one has accessed them before.
  DataLayoutPass *DLP = getAnalysisIfAvailable<DataLayoutPass>();
  if (!DLP)
    report_fatal_error("data layout missing");
  DL = &DLP->getDataLayout();

  BL.reset(SpecialCaseList::createOrDie(BlacklistFile));
  DynamicallyInitializedGlobals.Init(M);

  C = &(M.getContext());
  LongSize = DL->getPointerSizeInBits();
  IntptrTy = Type::getIntNTy(*C, LongSize);

  AsanCtorFunction = Function::Create(
      FunctionType::get(Type::getVoidTy(*C), false),
      GlobalValue::InternalLinkage, kAsanModuleCtorName, &M);
  BasicBlock *AsanCtorBB = BasicBlock::Create(*C, "", AsanCtorFunction);
  // call __asan_init in the module ctor.
  IRBuilder<> IRB(ReturnInst::Create(*C, AsanCtorBB));
  AsanInitFunction = checkInterfaceFunction(
      M.getOrInsertFunction(kAsanInitName, IRB.getVoidTy(), NULL));
  AsanInitFunction->setLinkage(Function::ExternalLinkage);
  IRB.CreateCall(AsanInitFunction);

  Mapping = getShadowMapping(M, LongSize);

  appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndDtorPriority);

  return true;
}
bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
  // For each NSObject descendant having a +load method, this method is invoked
  // by the ObjC runtime before any of the static constructors is called.
  // Therefore we need to instrument such methods with a call to __asan_init
  // at the beginning in order to initialize our runtime before any access to
  // the shadow memory.
  // We cannot just ignore these methods, because they may call other
  // instrumented functions.
  if (F.getName().find(" load]") != std::string::npos) {
    IRBuilder<> IRB(F.begin()->begin());
    IRB.CreateCall(AsanInitFunction);
    return true;
  }
  return false;
}
void AddressSanitizer::InjectCoverageAtBlock(Function &F, BasicBlock &BB) {
  BasicBlock::iterator IP = BB.getFirstInsertionPt(), BE = BB.end();
  // Skip static allocas at the top of the entry block so they don't become
  // dynamic when we split the block. If we used our optimized stack layout,
  // then there will only be one alloca and it will come first.
  for (; IP != BE; ++IP) {
    AllocaInst *AI = dyn_cast<AllocaInst>(IP);
    if (!AI || !AI->isStaticAlloca())
      break;
  }

  IRBuilder<> IRB(IP);
  Type *Int8Ty = IRB.getInt8Ty();
  GlobalVariable *Guard = new GlobalVariable(
      *F.getParent(), Int8Ty, false, GlobalValue::PrivateLinkage,
      Constant::getNullValue(Int8Ty), "__asan_gen_cov_" + F.getName());
  LoadInst *Load = IRB.CreateLoad(Guard);
  Load->setAtomic(Monotonic);
  Load->setAlignment(1);
  Value *Cmp = IRB.CreateICmpEQ(Constant::getNullValue(Int8Ty), Load);
  Instruction *Ins = SplitBlockAndInsertIfThen(
      Cmp, IP, false, MDBuilder(*C).createBranchWeights(1, 100000));
  IRB.SetInsertPoint(Ins);
  // We pass &F to __sanitizer_cov. We could avoid this and rely on
  // GET_CALLER_PC, but having the PC of the first instruction is just nice.
  Instruction *Call = IRB.CreateCall(AsanCovFunction);
  Call->setDebugLoc(IP->getDebugLoc());
  StoreInst *Store = IRB.CreateStore(ConstantInt::get(Int8Ty, 1), Guard);
  Store->setAtomic(Monotonic);
  Store->setAlignment(1);
}
// Poor man's coverage that works with ASan.
// We create a Guard boolean variable with the same linkage
// as the function and inject this code into the entry block (-asan-coverage=1)
// or all blocks (-asan-coverage=2):
// if (!*Guard) {
//    __sanitizer_cov(&F);
//    *Guard = 1;
// }
// The accesses to Guard are atomic. The rest of the logic is
// in __sanitizer_cov (it's fine to call it more than once).
//
// This coverage implementation provides very limited data:
// it only tells if a given function (block) was ever executed.
// No counters, no per-edge data.
// But for many use cases this is what we need and the added slowdown
// is negligible. This simple implementation will probably be obsoleted
// by the upcoming Clang-based coverage implementation.
// By having it here and now we hope to
// a) get the functionality to users earlier and
// b) collect usage statistics to help improve Clang coverage design.
bool AddressSanitizer::InjectCoverage(Function &F,
                                      const ArrayRef<BasicBlock *> AllBlocks) {
  if (!ClCoverage) return false;

  if (ClCoverage == 1 ||
      (unsigned)ClCoverageBlockThreshold < AllBlocks.size()) {
    InjectCoverageAtBlock(F, F.getEntryBlock());
  } else {
    for (size_t i = 0, n = AllBlocks.size(); i < n; i++)
      InjectCoverageAtBlock(F, *AllBlocks[i]);
  }
  return true;
}
bool AddressSanitizer::runOnFunction(Function &F) {
  if (BL->isIn(F)) return false;
  if (&F == AsanCtorFunction) return false;
  if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
  DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
  initializeCallbacks(*F.getParent());

  // If needed, insert __asan_init before checking for SanitizeAddress attr.
  maybeInsertAsanInitAtFunctionEntry(F);

  if (!F.hasFnAttribute(Attribute::SanitizeAddress))
    return false;

  if (!ClDebugFunc.empty() && ClDebugFunc != F.getName())
    return false;

  // We want to instrument every address only once per basic block (unless
  // there are calls between uses).
  SmallSet<Value*, 16> TempsToInstrument;
  SmallVector<Instruction*, 16> ToInstrument;
  SmallVector<Instruction*, 8> NoReturnCalls;
  SmallVector<BasicBlock*, 16> AllBlocks;
  SmallVector<Instruction*, 16> PointerComparisonsOrSubtracts;
  int NumAllocas = 0;
  bool IsWrite;
  // Fill the set of memory operations to instrument.
  for (Function::iterator FI = F.begin(), FE = F.end();
       FI != FE; ++FI) {
    AllBlocks.push_back(FI);
    TempsToInstrument.clear();
    int NumInsnsPerBB = 0;
    for (BasicBlock::iterator BI = FI->begin(), BE = FI->end();
         BI != BE; ++BI) {
      if (LooksLikeCodeInBug11395(BI)) return false;
      if (Value *Addr = isInterestingMemoryAccess(BI, &IsWrite)) {
        if (ClOpt && ClOptSameTemp) {
          if (!TempsToInstrument.insert(Addr))
            continue;  // We've seen this temp in the current BB.
        }
      } else if (ClInvalidPointerPairs &&
                 isInterestingPointerComparisonOrSubtraction(BI)) {
        PointerComparisonsOrSubtracts.push_back(BI);
        continue;
      } else if (isa<MemIntrinsic>(BI) && ClMemIntrin) {
        // ok, take it.
      } else {
        if (isa<AllocaInst>(BI))
          NumAllocas++;
        CallSite CS(BI);
        if (CS) {
          // A call inside BB.
          TempsToInstrument.clear();
          if (CS.doesNotReturn())
            NoReturnCalls.push_back(CS.getInstruction());
        }
        continue;
      }
      ToInstrument.push_back(BI);
      NumInsnsPerBB++;
      if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB)
        break;
    }
  }
  Function *UninstrumentedDuplicate = 0;
  bool LikelyToInstrument =
      !NoReturnCalls.empty() || !ToInstrument.empty() || (NumAllocas > 0);
  if (ClKeepUninstrumented && LikelyToInstrument) {
    ValueToValueMapTy VMap;
    UninstrumentedDuplicate = CloneFunction(&F, VMap, false);
    UninstrumentedDuplicate->removeFnAttr(Attribute::SanitizeAddress);
    UninstrumentedDuplicate->setName("NOASAN_" + F.getName());
    F.getParent()->getFunctionList().push_back(UninstrumentedDuplicate);
  }

  bool UseCalls = false;
  if (ClInstrumentationWithCallsThreshold >= 0 &&
      ToInstrument.size() > (unsigned)ClInstrumentationWithCallsThreshold)
    UseCalls = true;
  // Instrument.
  int NumInstrumented = 0;
  for (size_t i = 0, n = ToInstrument.size(); i != n; i++) {
    Instruction *Inst = ToInstrument[i];
    if (ClDebugMin < 0 || ClDebugMax < 0 ||
        (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) {
      if (isInterestingMemoryAccess(Inst, &IsWrite))
        instrumentMop(Inst, UseCalls);
      else
        instrumentMemIntrinsic(cast<MemIntrinsic>(Inst), UseCalls);
    }
    NumInstrumented++;
  }
  FunctionStackPoisoner FSP(F, *this);
  bool ChangedStack = FSP.runOnFunction();

  // We must unpoison the stack before every NoReturn call (throw, _exit, etc).
  // See e.g. http://code.google.com/p/address-sanitizer/issues/detail?id=37
  for (size_t i = 0, n = NoReturnCalls.size(); i != n; i++) {
    Instruction *CI = NoReturnCalls[i];
    IRBuilder<> IRB(CI);
    IRB.CreateCall(AsanHandleNoReturnFunc);
  }

  for (size_t i = 0, n = PointerComparisonsOrSubtracts.size(); i != n; i++) {
    instrumentPointerComparisonOrSubtraction(PointerComparisonsOrSubtracts[i]);
    NumInstrumented++;
  }
  bool res = NumInstrumented > 0 || ChangedStack || !NoReturnCalls.empty();

  if (InjectCoverage(F, AllBlocks))
    res = true;

  DEBUG(dbgs() << "ASAN done instrumenting: " << res << " " << F << "\n");

  if (ClKeepUninstrumented) {
    if (!res) {
      // No instrumentation is done, no need for the duplicate.
      if (UninstrumentedDuplicate)
        UninstrumentedDuplicate->eraseFromParent();
    } else {
      // The function was instrumented. We must have the duplicate.
      assert(UninstrumentedDuplicate);
      UninstrumentedDuplicate->setSection("NOASAN");
      assert(!F.hasSection());
      F.setSection("ASAN");
    }
  }

  return res;
}
// Workaround for bug 11395: we don't want to instrument stack in functions
// with large assembly blobs (32-bit only), otherwise reg alloc may crash.
// FIXME: remove once the bug 11395 is fixed.
bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
  if (LongSize != 32) return false;
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || !CI->isInlineAsm()) return false;
  if (CI->getNumArgOperands() <= 5) return false;
  // We have inline assembly with quite a few arguments.
  return true;
}
void FunctionStackPoisoner::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  for (int i = 0; i <= kMaxAsanStackMallocSizeClass; i++) {
    std::string Suffix = itostr(i);
    AsanStackMallocFunc[i] = checkInterfaceFunction(
        M.getOrInsertFunction(kAsanStackMallocNameTemplate + Suffix, IntptrTy,
                              IntptrTy, IntptrTy, NULL));
    AsanStackFreeFunc[i] = checkInterfaceFunction(M.getOrInsertFunction(
        kAsanStackFreeNameTemplate + Suffix, IRB.getVoidTy(), IntptrTy,
        IntptrTy, IntptrTy, NULL));
  }
  AsanPoisonStackMemoryFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanPoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
  AsanUnpoisonStackMemoryFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy, NULL));
}
void
FunctionStackPoisoner::poisonRedZones(const ArrayRef<uint8_t> ShadowBytes,
                                      IRBuilder<> &IRB, Value *ShadowBase,
                                      bool DoPoison) {
  size_t n = ShadowBytes.size();
  size_t i = 0;
  // We need to (un)poison n bytes of stack shadow. Poison as many as we can
  // using 64-bit stores (if we are on 64-bit arch), then poison the rest
  // with 32-bit stores, then with 16-bit stores, then with 8-bit stores.
  for (size_t LargeStoreSizeInBytes = ASan.LongSize / 8;
       LargeStoreSizeInBytes != 0; LargeStoreSizeInBytes /= 2) {
    for (; i + LargeStoreSizeInBytes - 1 < n; i += LargeStoreSizeInBytes) {
      uint64_t Val = 0;
      for (size_t j = 0; j < LargeStoreSizeInBytes; j++) {
        if (ASan.DL->isLittleEndian())
          Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
        else
          Val = (Val << 8) | ShadowBytes[i + j];
      }
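      // E.g. on a little-endian 64-bit target, the 8 shadow bytes
      // {0xf1, 0xf1, 0xf1, 0xf1, 0x00, 0x00, 0x02, 0xf3} are packed into the
      // single constant 0xf3020000f1f1f1f1 and written with one 64-bit store.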
      Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
      Type *StoreTy = Type::getIntNTy(*C, LargeStoreSizeInBytes * 8);
      Value *Poison = ConstantInt::get(StoreTy, DoPoison ? Val : 0);
      IRB.CreateStore(Poison, IRB.CreateIntToPtr(Ptr, StoreTy->getPointerTo()));
    }
  }
}
// Fake stack allocator (asan_fake_stack.h) has 11 size classes
// for every power of 2 from kMinStackMallocSize to kMaxAsanStackMallocSizeClass
static int StackMallocSizeClass(uint64_t LocalStackSize) {
  assert(LocalStackSize <= kMaxStackMallocSize);
  uint64_t MaxSize = kMinStackMallocSize;
  for (int i = 0; ; i++, MaxSize *= 2)
    if (LocalStackSize <= MaxSize)
      return i;
  llvm_unreachable("impossible LocalStackSize");
}
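// E.g. LocalStackSize <= 64 bytes maps to class 0, 65..128 bytes to class 1,
// and the kMaxStackMallocSize maximum (64K) to class 10.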
// Set Size bytes starting from ShadowBase to kAsanStackAfterReturnMagic.
// We cannot use the memset intrinsic because it may end up calling the actual
// memset. Size is a multiple of 8.
// Currently this generates 8-byte stores on x86_64; it may be better to
// generate wider stores.
void FunctionStackPoisoner::SetShadowToStackAfterReturnInlined(
    IRBuilder<> &IRB, Value *ShadowBase, int Size) {
  assert(!(Size % 8));
  assert(kAsanStackAfterReturnMagic == 0xf5);
  for (int i = 0; i < Size; i += 8) {
    Value *p = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
    IRB.CreateStore(ConstantInt::get(IRB.getInt64Ty(), 0xf5f5f5f5f5f5f5f5ULL),
                    IRB.CreateIntToPtr(p, IRB.getInt64Ty()->getPointerTo()));
  }
}
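// E.g. Size == 16 produces two i64 stores of 0xf5f5f5f5f5f5f5f5, at
// ShadowBase + 0 and ShadowBase + 8.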
void FunctionStackPoisoner::poisonStack() {
  int StackMallocIdx = -1;

  assert(AllocaVec.size() > 0);
  Instruction *InsBefore = AllocaVec[0];
  IRBuilder<> IRB(InsBefore);

  SmallVector<ASanStackVariableDescription, 16> SVD;
  SVD.reserve(AllocaVec.size());
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++) {
    AllocaInst *AI = AllocaVec[i];
    ASanStackVariableDescription D = { AI->getName().data(),
                                       getAllocaSizeInBytes(AI),
                                       AI->getAlignment(), AI, 0};
    SVD.push_back(D);
  }
  // Minimal header size (left redzone) is 4 pointers,
  // i.e. 32 bytes on 64-bit platforms and 16 bytes on 32-bit platforms.
  size_t MinHeaderSize = ASan.LongSize / 2;
  ASanStackFrameLayout L;
  ComputeASanStackFrameLayout(SVD, 1UL << Mapping.Scale, MinHeaderSize, &L);
  DEBUG(dbgs() << L.DescriptionString << " --- " << L.FrameSize << "\n");
  uint64_t LocalStackSize = L.FrameSize;
  bool DoStackMalloc =
      ASan.CheckUseAfterReturn && LocalStackSize <= kMaxStackMallocSize;

  Type *ByteArrayTy = ArrayType::get(IRB.getInt8Ty(), LocalStackSize);
  AllocaInst *MyAlloca =
      new AllocaInst(ByteArrayTy, "MyAlloca", InsBefore);
  assert((ClRealignStack & (ClRealignStack - 1)) == 0);
  size_t FrameAlignment = std::max(L.FrameAlignment, (size_t)ClRealignStack);
  MyAlloca->setAlignment(FrameAlignment);
  assert(MyAlloca->isStaticAlloca());
  Value *OrigStackBase = IRB.CreatePointerCast(MyAlloca, IntptrTy);
  Value *LocalStackBase = OrigStackBase;
  if (DoStackMalloc) {
    // LocalStackBase = OrigStackBase
    // if (__asan_option_detect_stack_use_after_return)
    //   LocalStackBase = __asan_stack_malloc_N(LocalStackSize, OrigStackBase);
    StackMallocIdx = StackMallocSizeClass(LocalStackSize);
    assert(StackMallocIdx <= kMaxAsanStackMallocSizeClass);
    Constant *OptionDetectUAR = F.getParent()->getOrInsertGlobal(
        kAsanOptionDetectUAR, IRB.getInt32Ty());
    Value *Cmp = IRB.CreateICmpNE(IRB.CreateLoad(OptionDetectUAR),
                                  Constant::getNullValue(IRB.getInt32Ty()));
    Instruction *Term = SplitBlockAndInsertIfThen(Cmp, InsBefore, false);
    BasicBlock *CmpBlock = cast<Instruction>(Cmp)->getParent();
    IRBuilder<> IRBIf(Term);
    LocalStackBase = IRBIf.CreateCall2(
        AsanStackMallocFunc[StackMallocIdx],
        ConstantInt::get(IntptrTy, LocalStackSize), OrigStackBase);
    BasicBlock *SetBlock = cast<Instruction>(LocalStackBase)->getParent();
    IRB.SetInsertPoint(InsBefore);
    PHINode *Phi = IRB.CreatePHI(IntptrTy, 2);
    Phi->addIncoming(OrigStackBase, CmpBlock);
    Phi->addIncoming(LocalStackBase, SetBlock);
    LocalStackBase = Phi;
  }
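  // From here on LocalStackBase is either the real stack address or, when the
  // runtime flag is set, the fake-stack frame returned by
  // __asan_stack_malloc_<N>; the phi above merges the two paths.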
  // Insert poison calls for lifetime intrinsics for alloca.
  bool HavePoisonedAllocas = false;
  for (size_t i = 0, n = AllocaPoisonCallVec.size(); i < n; i++) {
    const AllocaPoisonCall &APC = AllocaPoisonCallVec[i];
    assert(APC.InsBefore);
    IRBuilder<> IRB(APC.InsBefore);
    poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
    HavePoisonedAllocas |= APC.DoPoison;
  }
  // Replace Alloca instructions with base+offset.
  for (size_t i = 0, n = SVD.size(); i < n; i++) {
    AllocaInst *AI = SVD[i].AI;
    Value *NewAllocaPtr = IRB.CreateIntToPtr(
        IRB.CreateAdd(LocalStackBase,
                      ConstantInt::get(IntptrTy, SVD[i].Offset)),
        AI->getType());
    replaceDbgDeclareForAlloca(AI, NewAllocaPtr, DIB);
    AI->replaceAllUsesWith(NewAllocaPtr);
  }
  // The left-most redzone has enough space for at least 4 pointers.
  // Write the Magic value to redzone[0].
  Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
  IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
                  BasePlus0);
  // Write the frame description constant to redzone[1].
  Value *BasePlus1 = IRB.CreateIntToPtr(
      IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, ASan.LongSize/8)),
      IntptrPtrTy);
  GlobalVariable *StackDescriptionGlobal =
      createPrivateGlobalForString(*F.getParent(), L.DescriptionString,
                                   /*AllowMerging*/ true);
  Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal,
                                             IntptrTy);
  IRB.CreateStore(Description, BasePlus1);
  // Write the PC to redzone[2].
  Value *BasePlus2 = IRB.CreateIntToPtr(
      IRB.CreateAdd(LocalStackBase,
                    ConstantInt::get(IntptrTy, 2 * ASan.LongSize/8)),
      IntptrPtrTy);
  IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
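  // The first three words of the left redzone now hold the frame magic, a
  // pointer to the frame description string, and the function's address;
  // the runtime reads these when reporting errors on stack addresses.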
  // Poison the stack redzones at the entry.
  Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
  poisonRedZones(L.ShadowBytes, IRB, ShadowBase, true);
  // (Un)poison the stack before all ret instructions.
  for (size_t i = 0, n = RetVec.size(); i < n; i++) {
    Instruction *Ret = RetVec[i];
    IRBuilder<> IRBRet(Ret);
    // Mark the current frame as retired.
    IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
                       BasePlus0);
    if (DoStackMalloc) {
      assert(StackMallocIdx >= 0);
      // if LocalStackBase != OrigStackBase:
      //     // In use-after-return mode, poison the whole stack frame.
      //     if StackMallocIdx <= 4
      //         // For small sizes inline the whole thing:
      //         memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
      //         **SavedFlagPtr(LocalStackBase) = 0
      //     else
      //         __asan_stack_free_N(LocalStackBase, OrigStackBase)
      // else
      //     <This is not a fake stack; unpoison the redzones>
      Value *Cmp = IRBRet.CreateICmpNE(LocalStackBase, OrigStackBase);
      TerminatorInst *ThenTerm, *ElseTerm;
      SplitBlockAndInsertIfThenElse(Cmp, Ret, &ThenTerm, &ElseTerm);

      IRBuilder<> IRBPoison(ThenTerm);
      if (StackMallocIdx <= 4) {
        int ClassSize = kMinStackMallocSize << StackMallocIdx;
        SetShadowToStackAfterReturnInlined(IRBPoison, ShadowBase,
                                           ClassSize >> Mapping.Scale);
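        // The fake frame's last word holds SavedFlagPtr, a pointer to the
        // frame's "in use" flag (see the pseudocode above); writing 0
        // through it lets the runtime hand the frame out again.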
        Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
            LocalStackBase,
            ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
        Value *SavedFlagPtr = IRBPoison.CreateLoad(
            IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
        IRBPoison.CreateStore(
            Constant::getNullValue(IRBPoison.getInt8Ty()),
            IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
      } else {
        // For larger frames call __asan_stack_free_*.
        IRBPoison.CreateCall3(AsanStackFreeFunc[StackMallocIdx], LocalStackBase,
                              ConstantInt::get(IntptrTy, LocalStackSize),
                              OrigStackBase);
      }

      IRBuilder<> IRBElse(ElseTerm);
      poisonRedZones(L.ShadowBytes, IRBElse, ShadowBase, false);
    } else if (HavePoisonedAllocas) {
      // If we poisoned some allocas in llvm.lifetime analysis,
      // unpoison the whole stack frame now.
      assert(LocalStackBase == OrigStackBase);
      poisonAlloca(LocalStackBase, LocalStackSize, IRBRet, false);
    } else {
      poisonRedZones(L.ShadowBytes, IRBRet, ShadowBase, false);
    }
  }

  // We are done. Remove the old unused alloca instructions.
  for (size_t i = 0, n = AllocaVec.size(); i < n; i++)
    AllocaVec[i]->eraseFromParent();
}
void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
                                         IRBuilder<> &IRB, bool DoPoison) {
  // For now just insert a call to the ASan runtime.
  Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
  Value *SizeArg = ConstantInt::get(IntptrTy, Size);
  IRB.CreateCall2(DoPoison ? AsanPoisonStackMemoryFunc
                           : AsanUnpoisonStackMemoryFunc,
                  AddrArg, SizeArg);
}
// Handling llvm.lifetime intrinsics for a given %alloca:
// (1) collect all llvm.lifetime.xxx(%size, %value) describing the alloca.
// (2) if %size is constant, poison memory for llvm.lifetime.end (to detect
//     invalid accesses) and unpoison it for llvm.lifetime.start (the memory
//     could be poisoned by a previous llvm.lifetime.end instruction, as the
//     variable may go in and out of scope several times, e.g. in loops).
// (3) if we poisoned at least one %alloca in a function,
//     unpoison the whole stack frame at function exit.
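// For example (illustrative IR of the intrinsic pair handled here):
//   %buf = alloca [256 x i8]
//   %p = bitcast [256 x i8]* %buf to i8*
//   call void @llvm.lifetime.start(i64 256, i8* %p)  ; unpoison %buf here
//   ...
//   call void @llvm.lifetime.end(i64 256, i8* %p)    ; poison %buf here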
AllocaInst *FunctionStackPoisoner::findAllocaForValue(Value *V) {
  if (AllocaInst *AI = dyn_cast<AllocaInst>(V))
    // We're interested only in allocas we can handle.
    return isInterestingAlloca(*AI) ? AI : 0;
  // See if we've already calculated (or started to calculate) alloca for a
  // given value.
  AllocaForValueMapTy::iterator I = AllocaForValue.find(V);
  if (I != AllocaForValue.end())
    return I->second;
  // Store 0 while we're calculating alloca for value V to avoid
  // infinite recursion if the value references itself.
  AllocaForValue[V] = 0;
  AllocaInst *Res = 0;
  if (CastInst *CI = dyn_cast<CastInst>(V))
    Res = findAllocaForValue(CI->getOperand(0));
  else if (PHINode *PN = dyn_cast<PHINode>(V)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *IncValue = PN->getIncomingValue(i);
      // Allow self-referencing phi-nodes.
      if (IncValue == PN) continue;
      AllocaInst *IncValueAI = findAllocaForValue(IncValue);
      // AI for incoming values should exist and should all be equal.
      if (IncValueAI == 0 || (Res != 0 && IncValueAI != Res))
        return 0;
      Res = IncValueAI;
    }
  }
  if (Res != 0)
    AllocaForValue[V] = Res;