From: Peizhao Ou
Date: Fri, 13 Apr 2018 08:26:17 +0000 (-0700)
Subject: Compacts the load tainting to the last load if multiple loads are in a sequence
X-Git-Url: http://plrg.eecs.uci.edu/git/?a=commitdiff_plain;h=3006f109823bdb9d012d968b95d7c449389aafd5;p=oota-llvm.git

Compacts the load tainting to the last load if multiple loads are in a sequence
---

diff --git a/lib/CodeGen/CodeGenPrepare.cpp b/lib/CodeGen/CodeGenPrepare.cpp
index 8b9111cafda..2abac08d904 100644
--- a/lib/CodeGen/CodeGenPrepare.cpp
+++ b/lib/CodeGen/CodeGenPrepare.cpp
@@ -894,6 +894,71 @@ Instruction* findFirstLoadStoreCondBranchInst(LoadInst* LI, Vector* ChainedBB) {
   }
 }
 
+// XXX-update: For a relaxed load 'LI', if the first upcoming instruction is a
+// store/cond branch, return it; if it is a load, return the last load before
+// the first store/cond branch. Returns nullptr if there is no such immediately
+// following store/branch instruction, in which case we can only enforce the
+// load with 'acquire'. 'ChainedBB' collects all the blocks chained together
+// with unconditional branches from 'BB' to the block containing the first
+// store/cond branch.
+template <typename Vector>
+Instruction* findLastLoadOrFirstStoreCondBranchInst(LoadInst* LI, Vector* ChainedBB) {
+  assert(ChainedBB != nullptr && "Chained BB should not be nullptr");
+  auto* BB = LI->getParent();
+  ChainedBB->push_back(BB);
+  auto BE = BB->end();
+  auto BBI = BasicBlock::iterator(LI);
+  BBI++;
+  bool IsFirstLoad = false;       // True once a load has been seen after 'LI'.
+  Instruction* LastLI = nullptr;  // The last load seen so far.
+  while (true) {
+    for (; BBI != BE; BBI++) {
+      Instruction* Inst = &*BBI;
+      IntrinsicInst* II = dyn_cast<IntrinsicInst>(&*BBI);
+      if (II) {
+        if (II->getIntrinsicID() == Intrinsic::aarch64_stlxr) {
+          // Store-exclusive intrinsics are skipped; keep scanning.
+        } else if (II->getIntrinsicID() == Intrinsic::aarch64_ldxr) {
+          return II;
+        }
+      } else if (Inst->getOpcode() == Instruction::Load) {
+        IsFirstLoad = true;
+        LastLI = Inst;
+      } else if (Inst->getOpcode() == Instruction::Store) {
+        if (IsFirstLoad) {
+          return LastLI;
+        } else {
+          return Inst;
+        }
+      } else if (Inst->getOpcode() == Instruction::Br) {
+        auto* BrInst = dyn_cast<BranchInst>(Inst);
+        if (BrInst->isConditional()) {
+          if (IsFirstLoad) {
+            return LastLI;
+          } else {
+            return Inst;
+          }
+        } else {
+          // Reinitialize iterators with the destination of the unconditional
+          // branch.
+          BB = BrInst->getSuccessor(0);
+          ChainedBB->push_back(BB);
+          BBI = BB->begin();
+          BE = BB->end();
+          break;
+        }
+      }
+    }
+    if (BBI == BE) {
+      if (IsFirstLoad) {
+        return LastLI;
+      } else {
+        return nullptr;
+      }
+    }
+  }
+}
+
 // XXX-update: Find the next node of the last relaxed load from 'FromInst' to
 // 'ToInst'. If none, return 'ToInst'.
 Instruction* findLastLoadNext(Instruction* FromInst, Instruction* ToInst) {
@@ -1144,12 +1209,13 @@ bool AddFakeConditionalBranchAfterMonotonicLoads(
     auto* LI = *MonotonicLoadInsts.begin();
     MonotonicLoadInsts.erase(LI);
     SmallVector<BasicBlock*, 2> ChainedBB;
-    auto* FirstInst = findFirstLoadStoreCondBranchInst(LI, &ChainedBB);
-    if (!NeedExtraConstraints(LI, FirstInst)) {
+    auto* FirstStoreBranch = findFirstStoreCondBranchInst(LI, &ChainedBB);
+    if (!NeedExtraConstraints(LI, FirstStoreBranch)) {
       // 'LI' doesn't need extra load-store constraints.
       continue;
     }
 
+    auto* FirstInst = findLastLoadOrFirstStoreCondBranchInst(LI, &ChainedBB);
     // We really need to process the relaxed load now. Note that if the next
     // instruction is an RMW, it will be transformed into a control block, so
    // we can safely taint only the upcoming store instructions.
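To illustrate what the new helper computes, here is a minimal standalone
sketch of its intra-block scanning logic. Everything below (the Kind enum,
findTaintTarget(), and the indices) is invented for illustration and is not
part of this patch; the real helper walks LLVM IR, follows chains of
unconditional branches into successor blocks, and special-cases the AArch64
ldxr/stlxr intrinsics, all of which are deliberately omitted here.

#include <cassert>
#include <cstdio>
#include <vector>

enum class Kind { Load, Store, CondBranch, Other };

// Returns the index of the instruction to taint for a relaxed load at index
// 0: the last load of a run of loads, or the first store/cond branch if no
// load intervenes, or -1 when the block ends first (the 'acquire' fallback).
int findTaintTarget(const std::vector<Kind>& Insts) {
  bool SeenLoad = false;  // plays the role of 'IsFirstLoad' in the patch
  int LastLoad = -1;      // plays the role of 'LastLI' in the patch
  for (int I = 1; I < (int)Insts.size(); ++I) {
    switch (Insts[I]) {
    case Kind::Load:  // remember the most recent load in the run
      SeenLoad = true;
      LastLoad = I;
      break;
    case Kind::Store:
    case Kind::CondBranch:
      // A run of loads compacts to its last member; otherwise the
      // store/branch itself is the instruction to condition on.
      return SeenLoad ? LastLoad : I;
    case Kind::Other:
      break;
    }
  }
  return SeenLoad ? LastLoad : -1;
}

int main() {
  // Index 0 is the relaxed load itself; two more loads follow, then a store.
  // The tainting compacts to the load at index 2, not the store at index 3.
  std::vector<Kind> Seq = {Kind::Load, Kind::Load, Kind::Load, Kind::Store};
  assert(findTaintTarget(Seq) == 2);
  std::printf("taint the instruction at index %d\n", findTaintTarget(Seq));
  return 0;
}

The sketch mirrors only the decision of which instruction to return; the
insertion of the fake conditional branch and the soundness argument for the
compaction belong to the pass itself.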