Don't zap back-to-back volatile load/stores
author	Chris Lattner <sabre@nondot.org>
Fri, 7 Sep 2007 05:33:03 +0000 (05:33 +0000)
committer	Chris Lattner <sabre@nondot.org>
Fri, 7 Sep 2007 05:33:03 +0000 (05:33 +0000)
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@41759 91177308-0d34-0410-b5e6-96231b3b80d8
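
For context, the pattern this change preserves is a volatile self-assignment. A minimal C++ sketch follows (a hypothetical illustration, not part of the commit): because the object is volatile, both the read and the write-back are observable side effects, so InstCombine may no longer delete the store even though it writes back the value it just loaded.

    // Hypothetical example, not from the commit: self-assignment through a
    // volatile object. This typically lowers to a volatile load immediately
    // followed by a volatile store of the loaded value to the same address,
    // which is the pattern the new test below checks is preserved.
    volatile int hw_reg = 0;   // e.g. a memory-mapped device register

    void self_assign() {
      hw_reg = hw_reg;         // must stay a load and a store after -instcombine
    }

The test added below exercises the same pattern directly in LLVM IR.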

lib/Transforms/Scalar/InstructionCombining.cpp
test/Transforms/InstCombine/volatile_store.ll [new file with mode: 0644]

diff --git a/lib/Transforms/Scalar/InstructionCombining.cpp b/lib/Transforms/Scalar/InstructionCombining.cpp
index 26df55531a8365753e87b614e51d9c38464594a6..f9abafa34c0f5f976565eed7b2ff44ec692ede72 100644
--- a/lib/Transforms/Scalar/InstructionCombining.cpp
+++ b/lib/Transforms/Scalar/InstructionCombining.cpp
@@ -9066,7 +9066,7 @@ Instruction *InstCombiner::visitStoreInst(StoreInst &SI) {
     // the pointer we're loading and is producing the pointer we're storing,
     // then *this* store is dead (X = load P; store X -> P).
     if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
-      if (LI == Val && LI->getOperand(0) == Ptr) {
+      if (LI == Val && LI->getOperand(0) == Ptr && !SI.isVolatile()) {
         EraseInstFromFunction(SI);
         ++NumCombined;
         return 0;
diff --git a/test/Transforms/InstCombine/volatile_store.ll b/test/Transforms/InstCombine/volatile_store.ll
new file mode 100644
index 0000000..09651ba
--- /dev/null
+++ b/test/Transforms/InstCombine/volatile_store.ll
@@ -0,0 +1,14 @@
+; RUN: llvm-as < %s | opt -instcombine | llvm-dis | grep {volatile store}
+; RUN: llvm-as < %s | opt -instcombine | llvm-dis | grep {volatile load}
+
+@x = weak global i32 0         ; <i32*> [#uses=2]
+
+define void @self_assign_1() {
+entry:
+       %tmp = volatile load i32* @x            ; <i32> [#uses=1]
+       volatile store i32 %tmp, i32* @x
+       br label %return
+
+return:                ; preds = %entry
+       ret void
+}