@@ -1036,81 +1036,110 @@ const SCEV *SCEVAddRecExpr::evaluateAtIteration(const SCEV *It,
1036
1036
/// Build a SCEV expression equivalent to `ptrtoint(Op)` cast to integer type
/// \p Ty.
///
/// Rather than wrapping an arbitrary pointer-typed expression in a
/// SCEVPtrToIntExpr, the cast is sunk through the expression tree so that the
/// only SCEVPtrToIntExpr nodes created are over SCEVUnknown leaves; all other
/// computation is rewritten to be performed on integers.
///
/// \param Op    The (possibly pointer-typed) SCEV to cast.
/// \param Ty    The target integer type.
/// \param Depth Internal recursion marker; 0 for external callers. The
///              function self-recurses at most once (via the sinking
///              rewriter), with Depth == 1.
const SCEV *ScalarEvolution::getPtrToIntExpr(const SCEV *Op, Type *Ty,
                                             unsigned Depth) {
  assert(Ty->isIntegerTy() && "Target type must be an integer type!");
  assert(Depth <= 1 && "getPtrToIntExpr() should self-recurse at most once.");

  // We could be called with an integer-typed operands during SCEV rewrites.
  // Since the operand is an integer already, just perform zext/trunc/self cast.
  if (!Op->getType()->isPointerTy())
    return getTruncateOrZeroExtend(Op, Ty);

  // What would be an ID for such a SCEV cast expression?
  FoldingSetNodeID ID;
  ID.AddInteger(scPtrToInt);
  ID.AddPointer(Op);

  void *IP = nullptr;

  // Is there already an expression for such a cast?
  if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP))
    return getTruncateOrZeroExtend(S, Ty);

  // If not, is this expression something we can't reduce any further?
  if (isa<SCEVUnknown>(Op)) {
    // Create an explicit cast node.
    // We can reuse the existing insert position since if we get here,
    // we won't have made any changes which would invalidate it.
    Type *IntPtrTy = getDataLayout().getIntPtrType(Op->getType());
    assert(getDataLayout().getTypeSizeInBits(getEffectiveSCEVType(
               Op->getType())) == getDataLayout().getTypeSizeInBits(IntPtrTy) &&
           "We can only model ptrtoint if SCEV's effective (integer) type is "
           "sufficiently wide to represent all possible pointer values.");
    SCEV *S = new (SCEVAllocator)
        SCEVPtrToIntExpr(ID.Intern(SCEVAllocator), Op, IntPtrTy);
    UniqueSCEVs.InsertNode(S, IP);
    addToLoopUseLists(S);
    return getTruncateOrZeroExtend(S, Ty);
  }

  assert(Depth == 0 &&
         "getPtrToIntExpr() should not self-recurse for non-SCEVUnknown's.");

  // Otherwise, we've got some expression that is more complex than just a
  // single SCEVUnknown. But we don't want to have a SCEVPtrToIntExpr of an
  // arbitrary expression, we want to have SCEVPtrToIntExpr of an SCEVUnknown
  // only, and the expressions must otherwise be integer-typed.
  // So sink the cast down to the SCEVUnknown's.

  /// The SCEVPtrToIntSinkingRewriter takes a scalar evolution expression,
  /// which computes a pointer-typed value, and rewrites the whole expression
  /// tree so that *all* the computations are done on integers, and the only
  /// pointer-typed operands in the expression are SCEVUnknown.
  class SCEVPtrToIntSinkingRewriter
      : public SCEVRewriteVisitor<SCEVPtrToIntSinkingRewriter> {
    using Base = SCEVRewriteVisitor<SCEVPtrToIntSinkingRewriter>;

  public:
    SCEVPtrToIntSinkingRewriter(ScalarEvolution &SE) : SCEVRewriteVisitor(SE) {}

    /// Entry point: rewrite \p Scev so all pointer-typed subexpressions are
    /// replaced by ptrtoint(SCEVUnknown) leaves.
    static const SCEV *rewrite(const SCEV *Scev, ScalarEvolution &SE) {
      SCEVPtrToIntSinkingRewriter Rewriter(SE);
      return Rewriter.visit(Scev);
    }

    const SCEV *visit(const SCEV *S) {
      Type *STy = S->getType();
      // If the expression is not pointer-typed, just keep it as-is.
      if (!STy->isPointerTy())
        return S;
      // Else, recursively sink the cast down into it.
      return Base::visit(S);
    }

    const SCEV *visitAddExpr(const SCEVAddExpr *Expr) {
      SmallVector<const SCEV *, 2> Operands;
      bool Changed = false;
      for (auto *Op : Expr->operands()) {
        Operands.push_back(visit(Op));
        Changed |= Op != Operands.back();
      }
      // Only rebuild the add if some operand actually changed.
      return !Changed ? Expr : SE.getAddExpr(Operands, Expr->getNoWrapFlags());
    }

    const SCEV *visitMulExpr(const SCEVMulExpr *Expr) {
      SmallVector<const SCEV *, 2> Operands;
      bool Changed = false;
      for (auto *Op : Expr->operands()) {
        Operands.push_back(visit(Op));
        Changed |= Op != Operands.back();
      }
      // Only rebuild the mul if some operand actually changed.
      return !Changed ? Expr : SE.getMulExpr(Operands, Expr->getNoWrapFlags());
    }

    const SCEV *visitUnknown(const SCEVUnknown *Expr) {
      Type *ExprPtrTy = Expr->getType();
      assert(ExprPtrTy->isPointerTy() &&
             "Should only reach pointer-typed SCEVUnknown's.");
      // Leaf case: emit the actual ptrtoint cast; Depth == 1 marks the single
      // permitted self-recursion into getPtrToIntExpr().
      Type *ExprIntPtrTy = SE.getDataLayout().getIntPtrType(ExprPtrTy);
      return SE.getPtrToIntExpr(Expr, ExprIntPtrTy, /*Depth=*/1);
    }
  };

  // And actually perform the cast sinking.
  const SCEV *IntOp = SCEVPtrToIntSinkingRewriter::rewrite(Op, *this);
  assert(IntOp->getType()->isIntegerTy() &&
         "We must have succeeded in sinking the cast, "
         "and ending up with an integer-typed expression!");
  return getTruncateOrZeroExtend(IntOp, Ty);
}
1115
1144
1116
1145
const SCEV *ScalarEvolution::getTruncateExpr(const SCEV *Op, Type *Ty,
0 commit comments