namespace {
bool StoreAddressDependOnValue(StoreInst* SI, Value* DepVal);
+bool LoadAddressDependOnValue(LoadInst* LI, Value* DepVal);
Value* GetUntaintedAddress(Value* CurrentAddress);
// The depth we trace down a variable to look for its dependence set.
return usage_inst;
}
+// XXX-comment: For a relaxed load 'LI' and the first following store/conditional
+// branch instruction 'FirstInst', returns whether we still need to add extra
+// ordering constraints, i.e., whether no intermediate instruction I (up to and
+// including 'FirstInst') satisfies either of:
+// 1. I is a store (or an atomic load) whose address depends on 'LI'.
+// 2. I is a conditional branch whose condition depends on 'LI'.
+// Note that 'LI' and 'FirstInst' can be in different basic blocks, as long as
+// LI's basic block reaches FirstInst's block through a chain of unconditional
+// branches.
+bool NeedExtraConstraints(LoadInst* LI, Instruction* FirstInst) {
+ if (!FirstInst) {
+ return true;
+ }
+ auto* BB = LI->getParent();
+ auto BBI = LI->getIterator();
+ BBI++;
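+  // Scan forward from the instruction right after 'LI', following chains of
+  // unconditional branches, until we hit a store, a conditional branch, or
+  // 'FirstInst' itself.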
+ while (true) {
+ auto* I = &*BBI;
+ BBI++;
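+    // Follow an unconditional branch into its single successor and keep
+    // scanning from the top of that block.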
+ BranchInst *BI = dyn_cast<BranchInst>(I);
+ if (BI && BI->isUnconditional()) {
+ BasicBlock *DestBB = BI->getSuccessor(0);
+ BBI = DestBB->begin();
+ continue;
+ }
+
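+    // The first store ends the scan; ordering is already enforced only if its
+    // address depends on 'LI'.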
+ if (I->getOpcode() == Instruction::Store) {
+ return !StoreAddressDependOnValue(dyn_cast<StoreInst>(I), LI);
+ } else if (I->getOpcode() == Instruction::Load) {
+ if (I->isAtomic() &&
+ LoadAddressDependOnValue(dyn_cast<LoadInst>(I), LI)) {
+        // Normal (non-atomic) loads may be reordered by the backend, so we
+        // only rely on atomic loads here.
+ return false;
+ }
+ } else if (I->getOpcode() == Instruction::Br) {
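+      // Only conditional branches reach here (unconditional ones were followed
+      // above); a conditional branch whose condition depends on 'LI' already
+      // provides the required ordering.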
+ return !ConditionalBranchDependsOnValue(dyn_cast<BranchInst>(I), LI);
+ }
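+    // Stop once we have examined 'FirstInst'; nothing up to it provided the
+    // needed ordering, so extra constraints are required.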
+ if (I == FirstInst) {
+ return true;
+ }
+ }
+ return true;
+}
+
// XXX-comment: Returns whether the code has been changed.
bool AddFakeConditionalBranchAfterMonotonicLoads(
SmallSet<LoadInst*, 1>& MonotonicLoadInsts, DominatorTree* DT) {
MonotonicLoadInsts.erase(LI);
SmallVector<BasicBlock*, 2> ChainedBB;
auto* FirstInst = findFirstStoreCondBranchInst(LI, &ChainedBB);
- if (FirstInst != nullptr) {
- if (FirstInst->getOpcode() == Instruction::Store) {
- if (StoreAddressDependOnValue(dyn_cast<StoreInst>(FirstInst), LI)) {
- continue;
- }
- } else if (FirstInst->getOpcode() == Instruction::Br) {
- if (ConditionalBranchDependsOnValue(dyn_cast<BranchInst>(FirstInst),
- LI)) {
- continue;
- }
- } else {
- IntrinsicInst* II = dyn_cast<IntrinsicInst>(FirstInst);
- if (!II || II->getIntrinsicID() != Intrinsic::aarch64_stlxr) {
- dbgs() << "FirstInst=" << *FirstInst << "\n";
- assert(false && "findFirstStoreCondBranchInst() should return a "
- "store/condition branch instruction");
- }
- }
+
+    // First check whether an existing instruction between 'LI' and 'FirstInst'
+    // already enforces the required load-store ordering.
+ if (FirstInst != nullptr && !NeedExtraConstraints(LI, FirstInst)) {
+ continue;
}
// We really need to process the relaxed load now.
return dependenceSetInclusion(SI->getPointerOperand(), DepVal);
}
+bool LoadAddressDependOnValue(LoadInst* LI, Value* DepVal) {
+ return dependenceSetInclusion(LI->getPointerOperand(), DepVal);
+}
+
bool StoreDependOnValue(StoreInst* SI, Value* Dep) {
return dependenceSetInclusion(SI, Dep);
}