Fixed line endings.
author Simon Pilgrim <llvm-dev@redking.me.uk>
Wed, 5 Aug 2015 08:18:00 +0000 (08:18 +0000)
committer Simon Pilgrim <llvm-dev@redking.me.uk>
Wed, 5 Aug 2015 08:18:00 +0000 (08:18 +0000)
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@244021 91177308-0d34-0410-b5e6-96231b3b80d8

lib/Transforms/InstCombine/InstCombineCalls.cpp

index e09c00253513f4508cd481c15c6041f98aa72eae..668789b516d0704606936c9a27c7517e6fed7321 100644
@@ -194,44 +194,44 @@ Instruction *InstCombiner::SimplifyMemSet(MemSetInst *MI) {
     return MI;
   }
 
-  return nullptr;\r
-}\r
-\r
-static Value *SimplifyX86immshift(const IntrinsicInst &II,\r
-                                  InstCombiner::BuilderTy &Builder,\r
-                                  bool ShiftLeft) {\r
-  // Simplify if count is constant. To 0 if >= BitWidth,\r
-  // otherwise to shl/lshr.\r
-  auto CDV = dyn_cast<ConstantDataVector>(II.getArgOperand(1));\r
-  auto CInt = dyn_cast<ConstantInt>(II.getArgOperand(1));\r
-  if (!CDV && !CInt)\r
-    return nullptr;\r
-  ConstantInt *Count;\r
-  if (CDV)\r
-    Count = cast<ConstantInt>(CDV->getElementAsConstant(0));\r
-  else\r
-    Count = CInt;\r
-\r
-  auto Vec = II.getArgOperand(0);\r
-  auto VT = cast<VectorType>(Vec->getType());\r
-  auto SVT = VT->getElementType();\r
-  if (Count->getZExtValue() > (SVT->getPrimitiveSizeInBits() - 1))\r
-    return ConstantAggregateZero::get(VT);\r
-\r
-  unsigned VWidth = VT->getNumElements();\r
-\r
-  // Get a constant vector of the same type as the first operand.\r
-  auto VTCI = ConstantInt::get(VT->getElementType(), Count->getZExtValue());\r
-\r
-  if (ShiftLeft)\r
-    return Builder.CreateShl(Vec, Builder.CreateVectorSplat(VWidth, VTCI));\r
-\r
-  return Builder.CreateLShr(Vec, Builder.CreateVectorSplat(VWidth, VTCI));\r
-}\r
-\r
-static Value *SimplifyX86extend(const IntrinsicInst &II,\r
-                                InstCombiner::BuilderTy &Builder,\r
-                                bool SignExtend) {\r
+  return nullptr;
+}
+
+static Value *SimplifyX86immshift(const IntrinsicInst &II,
+                                  InstCombiner::BuilderTy &Builder,
+                                  bool ShiftLeft) {
+  // Simplify if count is constant. To 0 if >= BitWidth,
+  // otherwise to shl/lshr.
+  auto CDV = dyn_cast<ConstantDataVector>(II.getArgOperand(1));
+  auto CInt = dyn_cast<ConstantInt>(II.getArgOperand(1));
+  if (!CDV && !CInt)
+    return nullptr;
+  ConstantInt *Count;
+  if (CDV)
+    Count = cast<ConstantInt>(CDV->getElementAsConstant(0));
+  else
+    Count = CInt;
+
+  auto Vec = II.getArgOperand(0);
+  auto VT = cast<VectorType>(Vec->getType());
+  auto SVT = VT->getElementType();
+  if (Count->getZExtValue() > (SVT->getPrimitiveSizeInBits() - 1))
+    return ConstantAggregateZero::get(VT);
+
+  unsigned VWidth = VT->getNumElements();
+
+  // Get a constant vector of the same type as the first operand.
+  auto VTCI = ConstantInt::get(VT->getElementType(), Count->getZExtValue());
+
+  if (ShiftLeft)
+    return Builder.CreateShl(Vec, Builder.CreateVectorSplat(VWidth, VTCI));
+
+  return Builder.CreateLShr(Vec, Builder.CreateVectorSplat(VWidth, VTCI));
+}
+
+static Value *SimplifyX86extend(const IntrinsicInst &II,
+                                InstCombiner::BuilderTy &Builder,
+                                bool SignExtend) {
   VectorType *SrcTy = cast<VectorType>(II.getArgOperand(0)->getType());
   VectorType *DstTy = cast<VectorType>(II.getType());
   unsigned NumDstElts = DstTy->getNumElements();
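
For context, the hunk above re-commits SimplifyX86immshift, which folds the SSE2/AVX2 shift-by-immediate intrinsics into plain IR shifts: a constant count of at least the element bit width becomes a zero vector, anything smaller becomes a shl/lshr by a splatted constant. A minimal sketch of the effect, assuming Clang lowers _mm_slli_epi32/_mm_srli_epi32 to the matching llvm.x86.sse2.pslli.d/psrli.d intrinsics (the C function names below are illustrative, not part of the commit):

    // Illustrative only; not part of this commit.
    #include <immintrin.h>

    __m128i shl_by_3(__m128i v) {
      // Before instcombine: call <4 x i32> @llvm.x86.sse2.pslli.d(<4 x i32> %v, i32 3)
      // After instcombine:  shl <4 x i32> %v, <i32 3, i32 3, i32 3, i32 3>
      return _mm_slli_epi32(v, 3);
    }

    __m128i shr_by_32(__m128i v) {
      // A count >= the element bit width folds straight to a zero vector
      // ("To 0 if >= BitWidth" in the comment above).
      // After instcombine: ret <4 x i32> zeroinitializer
      return _mm_srli_epi32(v, 32);
    }
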
@@ -750,46 +750,46 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
       II->setArgOperand(0, V);
       return II;
     }
-    break;\r
-  }\r
-\r
-  // Constant fold lshr( <A x Bi>, Ci ).\r
-  case Intrinsic::x86_sse2_psrl_d:\r
-  case Intrinsic::x86_sse2_psrl_q:\r
-  case Intrinsic::x86_sse2_psrl_w:\r
-  case Intrinsic::x86_sse2_psrli_d:\r
-  case Intrinsic::x86_sse2_psrli_q:\r
-  case Intrinsic::x86_sse2_psrli_w:\r
-  case Intrinsic::x86_avx2_psrl_d:\r
-  case Intrinsic::x86_avx2_psrl_q:\r
-  case Intrinsic::x86_avx2_psrl_w:\r
-  case Intrinsic::x86_avx2_psrli_d:\r
-  case Intrinsic::x86_avx2_psrli_q:\r
-  case Intrinsic::x86_avx2_psrli_w:\r
-    if (Value *V = SimplifyX86immshift(*II, *Builder, false))\r
-      return ReplaceInstUsesWith(*II, V);\r
-    break;\r
-\r
-  // Constant fold shl( <A x Bi>, Ci ).\r
-  case Intrinsic::x86_sse2_psll_d:\r
-  case Intrinsic::x86_sse2_psll_q:\r
-  case Intrinsic::x86_sse2_psll_w:\r
+    break;
+  }
+
+  // Constant fold lshr( <A x Bi>, Ci ).
+  case Intrinsic::x86_sse2_psrl_d:
+  case Intrinsic::x86_sse2_psrl_q:
+  case Intrinsic::x86_sse2_psrl_w:
+  case Intrinsic::x86_sse2_psrli_d:
+  case Intrinsic::x86_sse2_psrli_q:
+  case Intrinsic::x86_sse2_psrli_w:
+  case Intrinsic::x86_avx2_psrl_d:
+  case Intrinsic::x86_avx2_psrl_q:
+  case Intrinsic::x86_avx2_psrl_w:
+  case Intrinsic::x86_avx2_psrli_d:
+  case Intrinsic::x86_avx2_psrli_q:
+  case Intrinsic::x86_avx2_psrli_w:
+    if (Value *V = SimplifyX86immshift(*II, *Builder, false))
+      return ReplaceInstUsesWith(*II, V);
+    break;
+
+  // Constant fold shl( <A x Bi>, Ci ).
+  case Intrinsic::x86_sse2_psll_d:
+  case Intrinsic::x86_sse2_psll_q:
+  case Intrinsic::x86_sse2_psll_w:
   case Intrinsic::x86_sse2_pslli_d:
   case Intrinsic::x86_sse2_pslli_q:
   case Intrinsic::x86_sse2_pslli_w:
   case Intrinsic::x86_avx2_psll_d:
   case Intrinsic::x86_avx2_psll_q:
   case Intrinsic::x86_avx2_psll_w:
-  case Intrinsic::x86_avx2_pslli_d:\r
-  case Intrinsic::x86_avx2_pslli_q:\r
-  case Intrinsic::x86_avx2_pslli_w:\r
-    if (Value *V = SimplifyX86immshift(*II, *Builder, true))\r
-      return ReplaceInstUsesWith(*II, V);\r
-    break;\r
-\r
-  case Intrinsic::x86_sse41_pmovsxbd:\r
-  case Intrinsic::x86_sse41_pmovsxbq:\r
-  case Intrinsic::x86_sse41_pmovsxbw:\r
+  case Intrinsic::x86_avx2_pslli_d:
+  case Intrinsic::x86_avx2_pslli_q:
+  case Intrinsic::x86_avx2_pslli_w:
+    if (Value *V = SimplifyX86immshift(*II, *Builder, true))
+      return ReplaceInstUsesWith(*II, V);
+    break;
+
+  case Intrinsic::x86_sse41_pmovsxbd:
+  case Intrinsic::x86_sse41_pmovsxbq:
+  case Intrinsic::x86_sse41_pmovsxbw:
   case Intrinsic::x86_sse41_pmovsxdq:
   case Intrinsic::x86_sse41_pmovsxwd:
   case Intrinsic::x86_sse41_pmovsxwq:
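
The pmovsx cases above are presumably routed to SimplifyX86extend (declared in the first hunk; its body is not shown here). A minimal sketch of the rewrite that helper is expected to perform, assuming Clang lowers _mm_cvtepi8_epi32 to llvm.x86.sse41.pmovsxbd and that the helper expands the intrinsic to a shuffle of the low elements followed by a sext (the C function name is illustrative):

    // Illustrative only; not part of this commit.
    #include <smmintrin.h>

    __m128i sext_low4_bytes(__m128i v) {
      // Before instcombine: call <4 x i32> @llvm.x86.sse41.pmovsxbd(<16 x i8> %v)
      // After instcombine:  %lo = shufflevector <16 x i8> %v, <16 x i8> undef,
      //                           <4 x i32> <i32 0, i32 1, i32 2, i32 3>
      //                     %r  = sext <4 x i8> %lo to <4 x i32>
      return _mm_cvtepi8_epi32(v);
    }
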