From baca5334db904819e6c0d26cd5f5203f82c44f6e Mon Sep 17 00:00:00 2001
From: Benjamin Kramer
Date: Mon, 23 Sep 2013 14:16:38 +0000
Subject: [PATCH] InstSimplify: Fold equality comparisons between non-inbounds
 GEPs.

Overflow doesn't affect the correctness of equalities. Computing this is
cheap: we just reuse the computation for the inbounds case and try to
peel off more non-inbounds GEPs. This pattern is unlikely to ever appear
in code generated by Clang, but SCEV occasionally produces it.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@191200 91177308-0d34-0410-b5e6-96231b3b80d8
---
 lib/Analysis/InstructionSimplify.cpp    | 17 +++++++++++++++--
 test/Transforms/InstSimplify/compare.ll |  9 +++++++++
 2 files changed, 24 insertions(+), 2 deletions(-)

diff --git a/lib/Analysis/InstructionSimplify.cpp b/lib/Analysis/InstructionSimplify.cpp
index 4b29824082f..af65cb74083 100644
--- a/lib/Analysis/InstructionSimplify.cpp
+++ b/lib/Analysis/InstructionSimplify.cpp
@@ -668,7 +668,8 @@ Value *llvm::SimplifyAddInst(Value *Op0, Value *Op1, bool isNSW, bool isNUW,
 /// follow non-inbounds geps. This allows it to remain usable for icmp ult/etc.
 /// folding.
 static Constant *stripAndComputeConstantOffsets(const DataLayout *TD,
-                                                Value *&V) {
+                                                Value *&V,
+                                                bool AllowNonInbounds = false) {
   assert(V->getType()->getScalarType()->isPointerTy());
 
   // Without DataLayout, just be conservative for now. Theoretically, more could
@@ -685,7 +686,8 @@ static Constant *stripAndComputeConstantOffsets(const DataLayout *TD,
   Visited.insert(V);
   do {
     if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
-      if (!GEP->isInBounds() || !GEP->accumulateConstantOffset(*TD, Offset))
+      if ((!AllowNonInbounds && !GEP->isInBounds()) ||
+          !GEP->accumulateConstantOffset(*TD, Offset))
         break;
       V = GEP->getPointerOperand();
     } else if (Operator::getOpcode(V) == Instruction::BitCast) {
@@ -1837,6 +1839,17 @@ static Constant *computePointerICmp(const DataLayout *TD,
       return ConstantInt::get(GetCompareTy(LHS),
                               !CmpInst::isTrueWhenEqual(Pred));
     }
+
+    // Even if a non-inbounds GEP occurs along the path we can still optimize
+    // equality comparisons concerning the result. We avoid walking the whole
+    // chain again by starting where the last calls to
+    // stripAndComputeConstantOffsets left off and accumulating the offsets.
+    Constant *LHSNoBound = stripAndComputeConstantOffsets(TD, LHS, true);
+    Constant *RHSNoBound = stripAndComputeConstantOffsets(TD, RHS, true);
+    if (LHS == RHS)
+      return ConstantExpr::getICmp(Pred,
+                                   ConstantExpr::getAdd(LHSOffset, LHSNoBound),
+                                   ConstantExpr::getAdd(RHSOffset, RHSNoBound));
   }
 
   // Otherwise, fail.
diff --git a/test/Transforms/InstSimplify/compare.ll b/test/Transforms/InstSimplify/compare.ll
index 095794923b9..73188aa697b 100644
--- a/test/Transforms/InstSimplify/compare.ll
+++ b/test/Transforms/InstSimplify/compare.ll
@@ -717,3 +717,12 @@ define i1 @alloca_gep(i64 %a, i64 %b) {
   ret i1 %cmp
 ; CHECK-NEXT: ret i1 false
 }
+
+define i1 @non_inbounds_gep_compare(i64* %a) {
+; CHECK-LABEL: @non_inbounds_gep_compare(
+; Equality compares with non-inbounds GEPs can be folded.
+  %x = getelementptr i64* %a, i64 42
+  %cmp = icmp eq i64* %a, %x
+  ret i1 %cmp
+; CHECK-NEXT: ret i1 false
+}
-- 
2.34.1
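
Illustrative IR (not part of the patch above): a minimal sketch of the kind of
equality compare the new code folds when the two operands strip down to the
same base through a mix of inbounds and non-inbounds GEPs. The names and
constants are made up, and the fold assumes DataLayout is available so byte
offsets can be computed (8 and 16 here, for i64 elements).

  %p = getelementptr i64* %base, i64 1            ; non-inbounds, byte offset 8
  %q = getelementptr inbounds i64* %base, i64 2   ; inbounds, byte offset 16
  %cmp = icmp eq i64* %p, %q                      ; simplifies to false: 8 != 16

The existing inbounds-only pass strips %q to %base (offset 16) but stops at
the non-inbounds %p; the new pass then strips %p to %base (offset 8), the
bases match, and the compare reduces to a constant icmp on the accumulated
offsets.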