Don't taint relaxed loads that immediately come before an AcqRel read-modify-write op
author Peizhao Ou <peizhaoo@uci.edu>
Tue, 13 Mar 2018 02:07:17 +0000 (19:07 -0700)
committer Peizhao Ou <peizhaoo@uci.edu>
Tue, 13 Mar 2018 02:07:17 +0000 (19:07 -0700)
include/llvm/IR/Instructions.h
lib/CodeGen/AtomicExpandPass.cpp
lib/CodeGen/CodeGenPrepare.cpp
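For context, a minimal source-level sketch of the pattern this commit targets (the names flag, counter, and consume are illustrative, not taken from the patch): a relaxed load that is immediately followed by an acq_rel read-modify-write on the same thread is, per this change, left untainted, because the RMW serves as a natural barrier.

#include <atomic>

std::atomic<int> flag{0};
std::atomic<int> counter{0};

int consume() {
  // Relaxed load immediately followed by an acq_rel RMW: per this commit,
  // the RMW serves as a natural barrier, so the load is left untainted.
  int f = flag.load(std::memory_order_relaxed);
  counter.fetch_add(1, std::memory_order_acq_rel);
  return f;
}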

index 28e1fd90fdf63cfde52c0cf5a02a0e2c120675da..cc6c25974cd5422ed48a6691a033783ea3fab7aa 100644 (file)
@@ -228,6 +228,14 @@ public:
   LoadInst(Value *Ptr, const char *NameStr, bool isVolatile,
            BasicBlock *InsertAtEnd);
 
+  bool getHasSubsequentAcqlRMW() {
+    return hasSubsequentAcqlRMW_;
+  }
+
+  void setHasSubsequentAcqlRMW(bool val) {
+    hasSubsequentAcqlRMW_ = val;
+  }
+
   /// isVolatile - Return true if this is a load from a volatile memory
   /// location.
   ///
@@ -306,6 +314,8 @@ private:
   void setInstructionSubclassData(unsigned short D) {
     Instruction::setInstructionSubclassData(D);
   }
+
+  bool hasSubsequentAcqlRMW_ = false; // None of the LoadInst constructors set this flag.
 };
 
 //===----------------------------------------------------------------------===//
index c8308afe9c1450d1fa2b7215250779e7be1ef703..077c52b19a7a3d1907cdd3dcda9eb967ccaf0f51 100644 (file)
@@ -71,6 +71,33 @@ namespace {
     bool isIdempotentRMW(AtomicRMWInst *AI);
     bool simplifyIdempotentRMW(AtomicRMWInst *AI);
   };
+
+
+// If 'LI' is a relaxed load that is immediately followed by an atomic
+// read-modify-write with acq_rel (or stronger) ordering, we don't have to
+// taint it, since the RMW serves as a natural barrier.
+void MarkRelaxedLoadBeforeAcqrelRMW(LoadInst* LI) {
+  auto* BB = LI->getParent();
+  auto BBI = LI->getIterator();
+  for (BBI++; BBI != BB->end(); BBI++) {
+    Instruction* CurInst = &*BBI;
+    if (!CurInst) {
+      return;
+    }
+    if (!CurInst->isAtomic()) {
+      continue;
+    }
+    auto* RMW = dyn_cast<AtomicRMWInst>(CurInst);
+    if (!RMW) {
+      return;
+    }
+    if (RMW->getOrdering() == AcquireRelease ||
+        RMW->getOrdering() == SequentiallyConsistent) {
+      LI->setHasSubsequentAcqlRMW(true);
+    }
+  }
+}
+
 }
 
 char AtomicExpand::ID = 0;
@@ -133,7 +160,8 @@ bool AtomicExpand::runOnFunction(Function &F) {
                          << *LI << '\n');
             LI->setOrdering(Acquire);
             */
-            MonotonicLoadInsts.push_back(LI);
+//            MonotonicLoadInsts.push_back(LI);
+            MarkRelaxedLoadBeforeAcqrelRMW(LI);
           }
           break;
         }
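To make the control flow of MarkRelaxedLoadBeforeAcqrelRMW above easier to follow, here is a standalone sketch of the same scan over a simplified instruction model; Inst, Ordering, and hasSubsequentAcqRelRMW are hypothetical stand-ins, not LLVM API. Non-atomic instructions are skipped, the scan stops at the first atomic instruction that is not an RMW, and an acq_rel or seq_cst RMW marks the load.

#include <cstddef>
#include <vector>

enum class Ordering { Relaxed, Acquire, Release, AcqRel, SeqCst };

struct Inst {
  bool isAtomic = false;
  bool isRMW = false;
  Ordering order = Ordering::Relaxed;
};

// Mirrors the loop in MarkRelaxedLoadBeforeAcqrelRMW: scan forward from the
// load at 'loadIdx' and report whether a later RMW already provides a
// barrier for it.
bool hasSubsequentAcqRelRMW(const std::vector<Inst> &bb, size_t loadIdx) {
  bool marked = false;
  for (size_t i = loadIdx + 1; i < bb.size(); ++i) {
    const Inst &cur = bb[i];
    if (!cur.isAtomic)
      continue;            // non-atomic instructions are ignored
    if (!cur.isRMW)
      return marked;       // first atomic op that is not an RMW: stop scanning
    if (cur.order == Ordering::AcqRel || cur.order == Ordering::SeqCst)
      marked = true;       // this RMW acts as a barrier for the relaxed load
  }
  return marked;
}

CodeGenPrepare (next diff) then consults the mark and keeps such loads out of MonotonicLoadInsts, so they are not tainted.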
index 96fa10bbb8eaddcf96c6ac91b92e9acfda2a91c2..1837ba2bd5d4678d3f5132aaf421f6c9ac4a25bd 100644 (file)
@@ -1413,7 +1413,8 @@ bool CodeGenPrepare::runOnFunction(Function &F) {
       switch (I->getOpcode()) {
         case Instruction::Load: {
           auto* LI = dyn_cast<LoadInst>(&*I);
-          if (LI->getOrdering() == Monotonic) {
+          if (LI->getOrdering() == Monotonic &&
+              !LI->getHasSubsequentAcqlRMW()) {
             MonotonicLoadInsts.insert(LI);
           }
           break;