@@ -2997,29 +2997,64 @@ Value *InstCombiner::getFreelyInvertedImpl(Value *V, bool WillInvertAllUses,
   return nullptr;
 }

-/// Return true if we should lower multi-dimensional geps
-static bool ismultiDimGep(GetElementPtrInst &GEP) {
-  // Limit handling to only 3D and 4D arrays with integer types.
-  // getelementptr [9 x [9 x [9 x i32]]], ptr @arr, i64 0, i64 %i, i64 2, i64 3
-  unsigned NumOps = GEP.getNumOperands();
+/// Accumulate constant indices from GEPs with all-constant indices, then
+/// check if the outermost GEP (with one variable index) is flattenable.
+/// Matches and returns true for multi-dimensional array geps with only one
+/// variable index. The pointer could also be another gep with all constant
+/// indices. For ex:
+///  - getelementptr [9 x [9 x [9 x i32]]], ptr @arr, i64 0, i64 %i, i64 2, i64 3
+///  - getelementptr [9 x [9 x [9 x i32]]],
+///        <another gep>, i64 0, i64 %i, i64 2, i64 3
+static bool ismultiDimGepFlattenable(const GetElementPtrInst &GEP) {
+  // Collect all indices, outermost last
+  SmallVector<const GEPOperator *, 4> GEPChain;
+  const Value *Base = &GEP;
+
+  // Go over GEPs with all constant indices
+  while (auto *CurGep = dyn_cast<GEPOperator>(Base)) {
+    bool AllConst = true;
+    for (unsigned I = 1; I < CurGep->getNumOperands(); ++I)
+      if (!isa<ConstantInt>(CurGep->getOperand(I)))
+        AllConst = false;
+    if (!AllConst)
+      break;
+    GEPChain.push_back(CurGep);
+    Base = CurGep->getOperand(0)->stripPointerCasts();
+  }
+
+  // Accumulate all indices from innermost to outermost
+  SmallVector<Value *, 8> Indices;
+  for (int I = GEPChain.size() - 1; I >= 0; --I) {
+    const GEPOperator *GO = GEPChain[I];
+    for (unsigned J = 1; J < GO->getNumOperands(); ++J)
+      Indices.push_back(GO->getOperand(J));
+  }
+
+  // Add indices from the main GEP (skip pointer operand)
+  for (unsigned J = 1; J < GEP.getNumOperands(); ++J)
+    Indices.push_back(GEP.getOperand(J));
+
+  if (Indices.empty())
+    return false;

   // First index must be constant zero (array base)
-  if (!isa<ConstantInt>(GEP.getOperand(1)) ||
-      !cast<ConstantInt>(GEP.getOperand(1))->isZero())
+  if (!isa<ConstantInt>(Indices[0]) || !cast<ConstantInt>(Indices[0])->isZero())
     return false;

+  unsigned NumDims = Indices.size() - 1;
+
   // Limit lowering for arrays with 3 or more dimensions
-  if (NumOps < 5)
+  if (NumDims < 3)
     return false;

   // Check that it's arrays all the way
   Type *CurTy = GEP.getSourceElementType();
   unsigned NumVar = 0;
-  for (unsigned I = 2; I < NumOps; ++I) {
+  for (unsigned I = 1; I < Indices.size(); ++I) {
     auto *ArrTy = dyn_cast<ArrayType>(CurTy);
     if (!ArrTy)
       return false;
-    if (!isa<ConstantInt>(GEP.getOperand(I)))
+    if (!isa<ConstantInt>(Indices[I]))
       ++NumVar;
     CurTy = ArrTy->getElementType();
   }
@@ -3054,7 +3089,7 @@ static bool shouldCanonicalizeGEPToPtrAdd(GetElementPtrInst &GEP) {
              m_Shl(m_Value(), m_ConstantInt())))))
     return true;

-  if (ismultiDimGep(GEP))
+  if (ismultiDimGepFlattenable(GEP))
     return true;

   // gep (gep %p, C1), %x, C2 is expanded so the two constants can
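For illustration only, not part of the patch: a sketch of the kind of IR the new predicate is meant to accept, and the flattened ptradd-style form such a GEP is canonicalized to once shouldCanonicalizeGEPToPtrAdd() returns true. The names @arr and @example, and the exact offset arithmetic shown, are hypothetical.

; 3-D array GEP with a single variable index %i, the shape that
; ismultiDimGepFlattenable() is intended to match (hypothetical example).
@arr = global [9 x [9 x [9 x i32]]] zeroinitializer

define ptr @example(i64 %i) {
  %p = getelementptr [9 x [9 x [9 x i32]]], ptr @arr, i64 0, i64 %i, i64 2, i64 3
  ret ptr %p
}

; After canonicalization to the i8 (ptradd) form, the address is computed as a
; single flattened byte offset, roughly:
;   %scaled = mul i64 %i, 324        ; 9 * 9 * 4 bytes per outermost element
;   %off    = add i64 %scaled, 84    ; (2 * 9 + 3) * 4 bytes
;   %p      = getelementptr i8, ptr @arr, i64 %off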