reference, declaration, definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References

lib/Transforms/Scalar/LoopStrengthReduce.cpp
 2071   const SCEV *BackedgeTakenCount = SE.getBackedgeTakenCount(L);
 2116     const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(SE.getSCEV(PH));
 2124     if ((int)SE.getTypeSizeInBits(SrcTy) > Mantissa)
 2256   const SCEV *BackedgeTakenCount = SE.getBackedgeTakenCount(L);
 2259   const SCEV *One = SE.getConstant(BackedgeTakenCount->getType(), 1);
 2262   const SCEV *IterationCount = SE.getAddExpr(One, BackedgeTakenCount);
 2263   if (IterationCount != SE.getSCEV(Sel)) return Cond;
 2300   const SCEV *IV = SE.getSCEV(Cond->getOperand(0));
 2304       AR->getStepRecurrence(SE) != One)
 2317          if (BO1->isOne() && SE.getSCEV(BO->getOperand(0)) == MaxRHS)
 2321         if (BO1->isOne() && SE.getSCEV(BO->getOperand(0)) == MaxRHS)
 2325   } else if (SE.getSCEV(Sel->getOperand(1)) == MaxRHS)
 2327   else if (SE.getSCEV(Sel->getOperand(2)) == MaxRHS)
 2431           if (SE.getTypeSizeInBits(A->getType()) !=
 2432               SE.getTypeSizeInBits(B->getType())) {
 2433             if (SE.getTypeSizeInBits(A->getType()) >
 2434                 SE.getTypeSizeInBits(B->getType()))
 2435               B = SE.getSignExtendExpr(B, A->getType());
 2437               A = SE.getSignExtendExpr(A, B->getType());
 2440                 dyn_cast_or_null<SCEVConstant>(getExactSDiv(B, A, SE))) {
 2568   int64_t Offset = ExtractImmediate(Expr, SE);
 2660     Types.insert(SE.getEffectiveSCEVType(Expr->getType()));
 2668           Strides.insert(AR->getStepRecurrence(SE));
 2684       if (SE.getTypeSizeInBits(OldStride->getType()) !=
 2685           SE.getTypeSizeInBits(NewStride->getType())) {
 2686         if (SE.getTypeSizeInBits(OldStride->getType()) >
 2687             SE.getTypeSizeInBits(NewStride->getType()))
 2688           NewStride = SE.getSignExtendExpr(NewStride, OldStride->getType());
 2690           OldStride = SE.getSignExtendExpr(OldStride, NewStride->getType());
 2694                                                         SE, true))) {
 2700                                                                SE, true))) {
 2909   const SCEV *const OperExpr = SE.getSCEV(NextIV);
 2935     const SCEV *PrevExpr = SE.getSCEV(PrevIV);
 2936     const SCEV *IncExpr = SE.getMinusSCEV(OperExpr, PrevExpr);
 2937     if (!SE.isLoopInvariant(IncExpr, L))
 2940     if (Chain.isProfitableIncrement(OperExpr, IncExpr, SE)) {
 3002     if (SE.isSCEVable(OtherUse->getType())
 3003         && !isa<SCEVUnknown>(SE.getSCEV(OtherUse))
 3059       if (SE.isSCEVable(I.getType()) && !isa<SCEVUnknown>(SE.getSCEV(&I)))
 3070       User::op_iterator IVOpIter = findIVOperand(I.op_begin(), IVOpEnd, L, SE);
 3075         IVOpIter = findIVOperand(std::next(IVOpIter), IVOpEnd, L, SE);
 3081     if (!SE.isSCEVable(PN.getType()))
 3094                            ChainUsersVec[UsersIdx].FarUsers, SE))
 3146                                              IVOpEnd, L, SE);
 3159     if (SE.getSCEV(*IVOpIter) == Head.IncExpr
 3160         || SE.getSCEV(IVSrc) == Head.IncExpr) {
 3163     IVOpIter = findIVOperand(std::next(IVOpIter), IVOpEnd, L, SE);
 3174   Type *IntTy = SE.getEffectiveSCEVType(IVTy);
 3187       const SCEV *IncExpr = SE.getNoopOrSignExtend(Inc.IncExpr, IntTy);
 3189         SE.getAddExpr(LeftOverExpr, IncExpr) : IncExpr;
 3195     const SCEV *IVOperExpr = SE.getAddExpr(SE.getUnknown(IVSrc),
 3196                                              SE.getUnknown(IncV));
 3208       assert(SE.getTypeSizeInBits(IVTy) >= SE.getTypeSizeInBits(OperTy) &&
 3224       if (!PostIncV || (SE.getSCEV(PostIncV) != SE.getSCEV(IVSrc)))
 3242   bool SaveCmp = TTI.canSaveCmp(L, &ExitBranch, &SE, &LI, &DT, &AC, &LibInfo);
 3288         const SCEV *N = SE.getSCEV(NV);
 3289         if (SE.isLoopInvariant(N, L) && isSafeToExpand(N, SE)) {
 3292           N = normalizeForPostIncUse(N, TmpPostIncLoops, SE);
 3294           S = SE.getMinusSCEV(N, S);
 3321         SE.getTypeSizeInBits(LU.WidestFixupType) <
 3322         SE.getTypeSizeInBits(LF.OperandValToReplace->getType()))
 3340   if (!isSafeToExpand(S, SE))
 3344   F.initialMatch(S, L, SE);
 3439         if (SE.isSCEVable(UserInst->getType())) {
 3440           const SCEV *UserS = SE.getSCEV(const_cast<Instruction *>(UserInst));
 3446               SE.getUnknown(const_cast<Instruction *>(UserInst)));
 3454           if (SE.hasComputableLoopEvolution(SE.getSCEV(OtherOp), L))
 3469             SE.getTypeSizeInBits(LU.WidestFixupType) <
 3470             SE.getTypeSizeInBits(LF.OperandValToReplace->getType()))
 3578   if (TTI.shouldFavorPostInc() && mayUsePostIncMode(TTI, LU, BaseReg, L, SE))
 3581   const SCEV *Remainder = CollectSubexprs(BaseReg, nullptr, AddOps, L, SE);
 3593     if (isa<SCEVUnknown>(*J) && !SE.isLoopInvariant(*J, L))
 3598     if (isAlwaysFoldable(TTI, SE, LU.MinOffset, LU.MaxOffset, LU.Kind,
 3611         isAlwaysFoldable(TTI, SE, LU.MinOffset, LU.MaxOffset, LU.Kind,
 3615     const SCEV *InnerSum = SE.getAddExpr(InnerAddOps);
 3622     if (InnerSumSC && SE.getTypeSizeInBits(InnerSumSC->getType()) <= 64 &&
 3638     if (SC && SE.getTypeSizeInBits(SC->getType()) <= 64 &&
 3694     if (SE.properlyDominates(BaseReg, L->getHeader()) &&
 3695         !SE.hasComputableLoopEvolution(BaseReg, L)) {
 3697         CombinedIntegerType = SE.getEffectiveSCEVType(BaseReg->getType());
 3727     GenerateFormula(SE.getAddExpr(OpsCopy));
 3734     Ops.push_back(SE.getConstant(CombinedIntegerType, NewBase.UnfoldedOffset,
 3737     GenerateFormula(SE.getAddExpr(Ops));
 3746   GlobalValue *GV = ExtractSymbol(G, SE);
 3785       const SCEV *NewG = SE.getAddExpr(SE.getConstant(G->getType(), Offset), G);
 3816           dyn_cast<SCEVConstant>(GAR->getStepRecurrence(SE))) {
 3831   int64_t Imm = ExtractImmediate(G, SE);
 3871   if (SE.getTypeSizeInBits(IntTy) > 64) return;
 3919     const SCEV *FactorS = SE.getConstant(IntTy, Factor);
 3923       F.BaseRegs[i] = SE.getMulExpr(F.BaseRegs[i], FactorS);
 3924       if (getExactSDiv(F.BaseRegs[i], FactorS, SE) != Base.BaseRegs[i])
 3930       F.ScaledReg = SE.getMulExpr(F.ScaledReg, FactorS);
 3931       if (getExactSDiv(F.ScaledReg, FactorS, SE) != Base.ScaledReg)
 3995         const SCEV *FactorS = SE.getConstant(IntTy, Factor);
 4000         if (const SCEV *Quotient = getExactSDiv(AR, FactorS, SE, true)) {
 4030   DstTy = SE.getEffectiveSCEVType(DstTy);
 4041         const SCEV *NewScaledReg = SE.getAnyExtendExpr(F.ScaledReg, SrcTy);
 4048         const SCEV *NewBaseReg = SE.getAnyExtendExpr(BaseReg, SrcTy);
 4110     int64_t Imm = ExtractImmediate(Reg, SE);
 4190     Type *IntTy = SE.getEffectiveSCEVType(OrigReg->getType());
 4191     const SCEV *NegImmS = SE.getSCEV(ConstantInt::get(IntTy, -(uint64_t)Imm));
 4192     unsigned BitWidth = SE.getTypeSizeInBits(IntTy);
 4206         if (F.referencesReg(SE.getSCEV(
 4214         NewF.ScaledReg = SE.getAddExpr(NegImmS, NewF.ScaledReg);
 4239                 mayUsePostIncMode(TTI, LU, OrigReg, this->L, SE))
 4246           NewF.BaseRegs[N] = SE.getAddExpr(NegImmS, BaseReg);
 4341       Cost CostF(L, SE, TTI);
 4374         Cost CostBest(L, SE, TTI);
 4627       Cost CostFA(L, SE, TTI);
 4628       Cost CostFB(L, SE, TTI);
 4920   Cost NewCost(L, SE, TTI);
 4971   Cost SolutionCost(L, SE, TTI);
 4973   Cost CurCost(L, SE, TTI);
 5150   else if (SE.getEffectiveSCEVType(Ty) == SE.getEffectiveSCEVType(OpTy))
 5154   Type *IntTy = SE.getEffectiveSCEVType(Ty);
 5164     Reg = denormalizeForPostIncUse(Reg, LF.PostIncLoops, SE);
 5165     Ops.push_back(SE.getUnknown(Rewriter.expandCodeFor(Reg, nullptr)));
 5175     ScaledS = denormalizeForPostIncUse(ScaledS, Loops, SE);
 5181             SE.getUnknown(Rewriter.expandCodeFor(ScaledS, nullptr)));
 5198         Value *FullV = Rewriter.expandCodeFor(SE.getAddExpr(Ops), nullptr);
 5200         Ops.push_back(SE.getUnknown(FullV));
 5202       ScaledS = SE.getUnknown(Rewriter.expandCodeFor(ScaledS, nullptr));
 5205             SE.getMulExpr(ScaledS, SE.getConstant(ScaledS->getType(), F.Scale));
 5214       Value *FullV = Rewriter.expandCodeFor(SE.getAddExpr(Ops), Ty);
 5216       Ops.push_back(SE.getUnknown(FullV));
 5218     Ops.push_back(SE.getUnknown(F.BaseGV));
 5224     Value *FullV = Rewriter.expandCodeFor(SE.getAddExpr(Ops), Ty);
 5226     Ops.push_back(SE.getUnknown(FullV));
 5238         Ops.push_back(SE.getUnknown(ICmpScaledV));
 5244       Ops.push_back(SE.getUnknown(ConstantInt::getSigned(IntTy, Offset)));
 5252     Ops.push_back(SE.getUnknown(ConstantInt::getSigned(IntTy,
 5258                       SE.getConstant(IntTy, 0) :
 5259                       SE.getAddExpr(Ops);
 5288       Constant *C = ConstantInt::getSigned(SE.getEffectiveSCEVType(OpTy),
 5462   SCEVExpander Rewriter(SE, L->getHeader()->getModule()->getDataLayout(),