++SS.Idx;
if (SS.Idx != ST->getNumElements()) {
const StructLayout *SL = TD.getStructLayout(ST);
- SS.Offset += SL->MemberOffsets[SS.Idx]-SL->MemberOffsets[SS.Idx-1];
+ SS.Offset +=
+     unsigned(SL->MemberOffsets[SS.Idx]-SL->MemberOffsets[SS.Idx-1]);
return;
}
Stack.pop_back(); // At the end of the structure
const ArrayType *AT = cast<ArrayType>(SS.Ty);
++SS.Idx;
if (SS.Idx != AT->getNumElements()) {
- SS.Offset += TD.getTypeSize(AT->getElementType());
+ SS.Offset += unsigned(TD.getTypeSize(AT->getElementType()));
return;
}
Stack.pop_back(); // At the end of the array
assert(SS.Idx < ST->getNumElements());
const StructLayout *SL = TD.getStructLayout(ST);
Stack.push_back(StackState(ST->getElementType(SS.Idx),
-                            SS.Offset+SL->MemberOffsets[SS.Idx]));
+                            SS.Offset+unsigned(SL->MemberOffsets[SS.Idx])));
}
} else {
const ArrayType *AT = cast<ArrayType>(SS.Ty);
assert(SS.Idx < AT->getNumElements());
Stack.push_back(StackState(AT->getElementType(),
                           SS.Offset+SS.Idx*
-                            TD.getTypeSize(AT->getElementType())));
+                            unsigned(TD.getTypeSize(AT->getElementType()))));
}
}
}
}
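
The hunks above all apply the same idiom: the sizes and member offsets coming back
from TargetData (getTypeSize and StructLayout::MemberOffsets) are wider than the
unsigned offsets tracked here, so each use is narrowed with an explicit cast instead
of relying on implicit truncation. A minimal standalone sketch of the idiom
(asNodeOffset and the sample offsets are illustrative, not part of this patch):

    #include <cstdint>

    // Stand-in for the wide sizes/offsets returned by the layout queries.
    // The explicit cast makes the narrowing visible at each use.
    static unsigned asNodeOffset(uint64_t SizeOrOffset) {
      return static_cast<unsigned>(SizeOrOffset);
    }

    int main() {
      const uint64_t MemberOffsets[] = {0, 8, 16};  // hypothetical struct layout
      unsigned Offset = 0;
      // Same shape as the patched code: advance by the delta between members.
      Offset += asNodeOffset(MemberOffsets[2] - MemberOffsets[1]);
      return Offset == 8 ? 0 : 1;
    }
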
// Figure out how big the new type we're merging in is...
- unsigned NewTySize = NewTy->isSized() ? TD.getTypeSize(NewTy) : 0;
+ unsigned NewTySize = NewTy->isSized() ? (unsigned)TD.getTypeSize(NewTy) : 0;
// Otherwise check to see if we can fold this type into the current node. If
// we can't, we fold the node completely, if we can, we potentially update our
// The offset we are looking for must be in the i'th element...
SubType = STy->getElementType(i);
- O += SL.MemberOffsets[i];
+ O += (unsigned)SL.MemberOffsets[i];
break;
}
case Type::ArrayTyID: {
SubType = cast<ArrayType>(SubType)->getElementType();
- unsigned ElSize = TD.getTypeSize(SubType);
+ unsigned ElSize = (unsigned)TD.getTypeSize(SubType);
unsigned Remainder = (Offset-O) % ElSize;
O = Offset-Remainder;
break;
if (isa<FunctionType>(SubType) &&
isa<FunctionType>(NewTy)) return false;
- unsigned SubTypeSize = SubType->isSized() ? TD.getTypeSize(SubType) : 0;
+ unsigned SubTypeSize = SubType->isSized() ?
+     (unsigned)TD.getTypeSize(SubType) : 0;
// Ok, we are getting desperate now. Check for physical subtyping, where we
// just require each element in the node to be compatible.
const StructType *STy = cast<StructType>(SubType);
const StructLayout &SL = *TD.getStructLayout(STy);
if (SL.MemberOffsets.size() > 1)
- NextPadSize = SL.MemberOffsets[1];
+ NextPadSize = (unsigned)SL.MemberOffsets[1];
else
NextPadSize = SubTypeSize;
NextSubType = STy->getElementType(0);
- NextSubTypeSize = TD.getTypeSize(NextSubType);
+ NextSubTypeSize = (unsigned)TD.getTypeSize(NextSubType);
break;
}
case Type::ArrayTyID:
NextSubType = cast<ArrayType>(SubType)->getElementType();
- NextSubTypeSize = TD.getTypeSize(NextSubType);
+ NextSubTypeSize = (unsigned)TD.getTypeSize(NextSubType);
NextPadSize = NextSubTypeSize;
break;
default: ;
for (gep_type_iterator I = gep_type_begin(GEP), E = gep_type_end(GEP);
I != E; ++I)
if (const StructType *STy = dyn_cast<StructType>(*I)) {
- unsigned FieldNo = cast<ConstantUInt>(I.getOperand())->getValue();
- Offset += TD.getStructLayout(STy)->MemberOffsets[FieldNo];
+ unsigned FieldNo =
+     (unsigned)cast<ConstantUInt>(I.getOperand())->getValue();
+ Offset += (unsigned)TD.getStructLayout(STy)->MemberOffsets[FieldNo];
} else if (const PointerType *PTy = dyn_cast<PointerType>(*I)) {
if (!isa<Constant>(I.getOperand()) ||
!cast<Constant>(I.getOperand())->isNullValue())
const StructLayout *SL = TD.getStructLayout(CS->getType());
for (unsigned i = 0, e = CS->getNumOperands(); i != e; ++i) {
DSNode *NHN = NH.getNode();
- DSNodeHandle NewNH(NHN, NH.getOffset()+SL->MemberOffsets[i]);
+ DSNodeHandle NewNH(NHN, NH.getOffset()+(unsigned)SL->MemberOffsets[i]);
MergeConstantInitIntoNode(NewNH, cast<Constant>(CS->getOperand(i)));
}
} else if (isa<ConstantAggregateZero>(C) || isa<UndefValue>(C)) {
class ModRefInfoBuilder : public InstVisitor<ModRefInfoBuilder> {
const DSGraph& funcGraph;
const FunctionModRefInfo& funcModRef;
- struct ModRefTable& modRefTable;
+ class ModRefTable& modRefTable;
ModRefInfoBuilder(); // DO NOT IMPLEMENT
ModRefInfoBuilder(const ModRefInfoBuilder&); // DO NOT IMPLEMENT
/// Add true-dep: U -> D
/// if (HasLoop(S))
/// Add anti-dep: D -> U
+ {
ModRefTable::ref_iterator JI=ModRefCurrent.usersBegin();
ModRefTable::ref_iterator JE = ModRefCurrent.usersBeforeDef_End(II);
for ( ; JI != JE; ++JI)
if (hasLoop)
funcDepGraph->AddSimpleDependence(**JI, **II, AntiDependence);
}
-
+
/// for every use U in UseSetCurrent that was seen *after* D
/// // NOTE: U comes before D in execution order
/// if (U & D)
if (hasLoop)
funcDepGraph->AddSimpleDependence(**II, **JI, TrueDependence);
}
+ }
/// for every def Dnext in DefSetPrev
/// // NOTE: Dnext comes after D in execution order