int OtherOpNum = OtherOp;
}
+/// SDTCisEltOfVec - This indicates that ThisOp is a scalar of the same
+/// type as the element type of OtherOp, which is a vector type.
+class SDTCisEltOfVec<int ThisOp, int OtherOp>
+ : SDTypeConstraint<ThisOp> {
+ int OtherOpNum = OtherOp;
+}
+
//===----------------------------------------------------------------------===//
// Selection DAG Type Profile definitions.
//
SDTCisPtrTy<0>
]>;
-def SDTRet : SDTypeProfile<0, 0, []>; // ret
+def SDTNone : SDTypeProfile<0, 0, []>; // ret, trap
def SDTLoad : SDTypeProfile<1, 1, [ // load
SDTCisPtrTy<1>
def SDTVecShuffle : SDTypeProfile<1, 3, [
SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>, SDTCisIntVectorOfSameSize<3, 0>
]>;
+def SDTVecExtract : SDTypeProfile<1, 2, [ // vector extract
+ SDTCisEltOfVec<0, 1>, SDTCisPtrTy<2>
+]>;
+def SDTVecInsert : SDTypeProfile<1, 3, [ // vector insert
+ SDTCisEltOfVec<2, 1>, SDTCisSameAs<0, 1>, SDTCisPtrTy<3>
+]>;
+
+def STDPrefetch : SDTypeProfile<0, 3, [ // prefetch
+ SDTCisPtrTy<0>, SDTCisSameAs<1, 2>, SDTCisInt<1>
+]>;
+
+def STDMemBarrier : SDTypeProfile<0, 5, [ // memory barrier
+ SDTCisSameAs<0,1>, SDTCisSameAs<0,2>, SDTCisSameAs<0,3>, SDTCisSameAs<0,4>,
+ SDTCisInt<0>
+]>;
+def STDAtomic3 : SDTypeProfile<1, 3, [
+ SDTCisSameAs<0,2>, SDTCisSameAs<0,3>, SDTCisInt<0>, SDTCisPtrTy<1>
+]>;
+def STDAtomic2 : SDTypeProfile<1, 2, [
+ SDTCisSameAs<0,2>, SDTCisInt<0>, SDTCisPtrTy<1>
+]>;
class SDCallSeqStart<list<SDTypeConstraint> constraints> :
SDTypeProfile<0, 1, constraints>;
def SDNPOptInFlag : SDNodeProperty; // Optionally read a flag operand
def SDNPMayStore : SDNodeProperty; // May write to memory, sets 'mayStore'.
def SDNPMayLoad : SDNodeProperty; // May read memory, sets 'mayLoad'.
+def SDNPSideEffect : SDNodeProperty; // Sets 'HasUnmodelledSideEffects'.
//===----------------------------------------------------------------------===//
// Selection DAG Node definitions.
def srcvalue;
def imm : SDNode<"ISD::Constant" , SDTIntLeaf , [], "ConstantSDNode">;
-def fpimm : SDNode<"ISD::TargetConstantFP",
- SDTFPLeaf, [], "ConstantFPSDNode">;
+def fpimm : SDNode<"ISD::ConstantFP", SDTFPLeaf , [], "ConstantFPSDNode">;
def vt : SDNode<"ISD::VALUETYPE" , SDTOther , [], "VTSDNode">;
def bb : SDNode<"ISD::BasicBlock", SDTOther , [], "BasicBlockSDNode">;
def cond : SDNode<"ISD::CONDCODE" , SDTOther , [], "CondCodeSDNode">;
def anyext : SDNode<"ISD::ANY_EXTEND" , SDTIntExtendOp>;
def trunc : SDNode<"ISD::TRUNCATE" , SDTIntTruncOp>;
def bitconvert : SDNode<"ISD::BIT_CONVERT", SDTUnaryOp>;
+def extractelt : SDNode<"ISD::EXTRACT_VECTOR_ELT", SDTVecExtract>;
+def insertelt : SDNode<"ISD::INSERT_VECTOR_ELT", SDTVecInsert>;
+
def fadd : SDNode<"ISD::FADD" , SDTFPBinOp, [SDNPCommutative]>;
def fsub : SDNode<"ISD::FSUB" , SDTFPBinOp>;
def brcond : SDNode<"ISD::BRCOND" , SDTBrcond, [SDNPHasChain]>;
def brind : SDNode<"ISD::BRIND" , SDTBrind, [SDNPHasChain]>;
def br : SDNode<"ISD::BR" , SDTBr, [SDNPHasChain]>;
-def ret : SDNode<"ISD::RET" , SDTRet, [SDNPHasChain]>;
+def ret : SDNode<"ISD::RET" , SDTNone, [SDNPHasChain]>;
+def trap : SDNode<"ISD::TRAP" , SDTNone,
+ [SDNPHasChain, SDNPSideEffect]>;
+
+def prefetch : SDNode<"ISD::PREFETCH" , STDPrefetch,
+ [SDNPHasChain, SDNPMayLoad, SDNPMayStore]>;
+
+def membarrier : SDNode<"ISD::MEMBARRIER" , STDMemBarrier,
+ [SDNPHasChain, SDNPSideEffect]>;
+
+// Do not use atomic_* directly; use the size-specific atomic_*_8/16/32/64
+// pattern fragments defined below.
+def atomic_lcs : SDNode<"ISD::ATOMIC_LCS" , STDAtomic3,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
+def atomic_las : SDNode<"ISD::ATOMIC_LAS" , STDAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
+def atomic_swap : SDNode<"ISD::ATOMIC_SWAP", STDAtomic2,
+ [SDNPHasChain, SDNPMayStore, SDNPMayLoad]>;
// Do not use ld, st directly. Use load, extload, sextload, zextload, store,
// and truncst (see below).
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i1;
+ LD->getMemoryVT() == MVT::i1;
return false;
}]>;
def extloadi8 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i8;
+ LD->getMemoryVT() == MVT::i8;
return false;
}]>;
def extloadi16 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i16;
+ LD->getMemoryVT() == MVT::i16;
return false;
}]>;
def extloadi32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i32;
+ LD->getMemoryVT() == MVT::i32;
return false;
}]>;
def extloadf32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::f32;
+ LD->getMemoryVT() == MVT::f32;
return false;
}]>;
def extloadf64 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::EXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::f64;
+ LD->getMemoryVT() == MVT::f64;
return false;
}]>;
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i1;
+ LD->getMemoryVT() == MVT::i1;
return false;
}]>;
def sextloadi8 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i8;
+ LD->getMemoryVT() == MVT::i8;
return false;
}]>;
def sextloadi16 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i16;
+ LD->getMemoryVT() == MVT::i16;
return false;
}]>;
def sextloadi32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::SEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i32;
+ LD->getMemoryVT() == MVT::i32;
return false;
}]>;
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i1;
+ LD->getMemoryVT() == MVT::i1;
return false;
}]>;
def zextloadi8 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i8;
+ LD->getMemoryVT() == MVT::i8;
return false;
}]>;
def zextloadi16 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i16;
+ LD->getMemoryVT() == MVT::i16;
return false;
}]>;
def zextloadi32 : PatFrag<(ops node:$ptr), (ld node:$ptr), [{
if (LoadSDNode *LD = dyn_cast<LoadSDNode>(N))
return LD->getExtensionType() == ISD::ZEXTLOAD &&
LD->getAddressingMode() == ISD::UNINDEXED &&
- LD->getLoadedVT() == MVT::i32;
+ LD->getMemoryVT() == MVT::i32;
return false;
}]>;
}]>;
// truncstore fragments.
-def truncstorei1 : PatFrag<(ops node:$val, node:$ptr),
- (st node:$val, node:$ptr), [{
- if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::i1 &&
- ST->getAddressingMode() == ISD::UNINDEXED;
- return false;
-}]>;
def truncstorei8 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::i8 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i8 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstorei16 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::i16 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i16 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstorei32 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::i32 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i32 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstoref32 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::f32 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f32 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
def truncstoref64 : PatFrag<(ops node:$val, node:$ptr),
(st node:$val, node:$ptr), [{
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N))
- return ST->isTruncatingStore() && ST->getStoredVT() == MVT::f64 &&
+ return ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f64 &&
ST->getAddressingMode() == ISD::UNINDEXED;
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i1;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i1;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i8;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i8;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i16;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i16;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i32;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::PRE_INC || AM == ISD::PRE_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::f32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f32;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i1;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i1;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i8;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i8;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i16;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i16;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::i32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::i32;
}
return false;
}]>;
if (StoreSDNode *ST = dyn_cast<StoreSDNode>(N)) {
ISD::MemIndexedMode AM = ST->getAddressingMode();
return (AM == ISD::POST_INC || AM == ISD::POST_DEC) &&
- ST->isTruncatingStore() && ST->getStoredVT() == MVT::f32;
+ ST->isTruncatingStore() && ST->getMemoryVT() == MVT::f32;
}
return false;
}]>;
+// Atomic pattern fragments, selected by memory value type.
+def atomic_lcs_8 : PatFrag<(ops node:$ptr, node:$cmp, node:$swp),
+ (atomic_lcs node:$ptr, node:$cmp, node:$swp), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i8;
+ return false;
+}]>;
+def atomic_lcs_16 : PatFrag<(ops node:$ptr, node:$cmp, node:$swp),
+ (atomic_lcs node:$ptr, node:$cmp, node:$swp), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i16;
+ return false;
+}]>;
+def atomic_lcs_32 : PatFrag<(ops node:$ptr, node:$cmp, node:$swp),
+ (atomic_lcs node:$ptr, node:$cmp, node:$swp), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i32;
+ return false;
+}]>;
+def atomic_lcs_64 : PatFrag<(ops node:$ptr, node:$cmp, node:$swp),
+ (atomic_lcs node:$ptr, node:$cmp, node:$swp), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i64;
+ return false;
+}]>;
+
+def atomic_las_8 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_las node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i8;
+ return false;
+}]>;
+def atomic_las_16 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_las node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i16;
+ return false;
+}]>;
+def atomic_las_32 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_las node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i32;
+ return false;
+}]>;
+def atomic_las_64 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_las node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i64;
+ return false;
+}]>;
+
+def atomic_swap_8 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_swap node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i8;
+ return false;
+}]>;
+def atomic_swap_16 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_swap node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i16;
+ return false;
+}]>;
+def atomic_swap_32 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_swap node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i32;
+ return false;
+}]>;
+def atomic_swap_64 : PatFrag<(ops node:$ptr, node:$inc),
+ (atomic_swap node:$ptr, node:$inc), [{
+ if (AtomicSDNode* V = dyn_cast<AtomicSDNode>(N))
+ return V->getVT() == MVT::i64;
+ return false;
+}]>;
+
+
+
// setcc convenience fragments.
def setoeq : PatFrag<(ops node:$lhs, node:$rhs),
(setcc node:$lhs, node:$rhs, SETOEQ)>;
//===----------------------------------------------------------------------===//
// Complex pattern definitions.
//
+
+class CPAttribute;
+// Pass the parent Operand as root to CP function rather
+// than the root of the sub-DAG
+def CPAttrParentAsRoot : CPAttribute;
+
// Complex patterns, e.g. X86 addressing mode, requires pattern matching code
// in C++. NumOperands is the number of operands returned by the select function;
// SelectFunc is the name of the function used to pattern match the max. pattern;
// e.g. X86 addressing mode - def addr : ComplexPattern<4, "SelectAddr", [add]>;
//
class ComplexPattern<ValueType ty, int numops, string fn,
- list<SDNode> roots = [], list<SDNodeProperty> props = []> {
+ list<SDNode> roots = [], list<SDNodeProperty> props = [],
+ list<CPAttribute> attrs = []> {
ValueType Ty = ty;
int NumOperands = numops;
string SelectFunc = fn;
list<SDNode> RootNodes = roots;
list<SDNodeProperty> Properties = props;
+ list<CPAttribute> Attributes = attrs;
}
//===----------------------------------------------------------------------===//