References to Ops in ScalarEvolution::getMinMaxExpr

lib/Analysis/ScalarEvolution.cpp
 3551   assert(!Ops.empty() && "Cannot get empty (u|s)(min|max)!");
 3552   if (Ops.size() == 1) return Ops[0];
 3554   Type *ETy = getEffectiveSCEVType(Ops[0]->getType());
 3555   for (unsigned i = 1, e = Ops.size(); i != e; ++i)
 3556     assert(getEffectiveSCEVType(Ops[i]->getType()) == ETy &&
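
Lines 3551-3556 are the entry guards on Ops: the list must be non-empty, a
single operand short-circuits, and every operand must share one effective
SCEV type. A hedged caller-side sketch of what the single-operand guard
implies (SE and the loop-invariant value X are hypothetical, assumed to be
in scope):

    // Sketch, not from the file: a one-operand smax collapses to its
    // operand. getSMaxExpr is the public ScalarEvolution entry point
    // that funnels into getMinMaxExpr.
    SmallVector<const SCEV *, 1> Ops = {X};
    const SCEV *S = SE.getSMaxExpr(Ops);
    assert(S == X && "min/max of one operand is that operand");
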
 3564   GroupByComplexity(Ops, &LI, DT);
 3567   if (const SCEV *S = std::get<0>(findExistingSCEVInCache(Kind, Ops))) {
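
Line 3564 canonicalizes the operand order and line 3567 probes the uniquing
cache before any simplification work. GroupByComplexity sorts Ops so that
constants come first and structurally equal operands end up adjacent; every
later folding step relies on that invariant. A rough sketch of the ordering
idea (the real comparator is much finer-grained and consults LoopInfo and
the dominator tree):

    // Sketch only: approximate the canonical order by SCEV kind, so that
    // scConstant (the smallest kind) sorts to the front and duplicates
    // become neighbors.
    llvm::stable_sort(Ops, [](const SCEV *A, const SCEV *B) {
      return A->getSCEVType() < B->getSCEVType();
    });

The probe on line 3567 unpacks only the first tuple element of
findExistingSCEVInCache; a non-null result means an identical (Kind, Ops)
expression was already interned and can be returned immediately.
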
 3573   if (const SCEVConstant *LHSC = dyn_cast<SCEVConstant>(Ops[0])) {
 3575     assert(Idx < Ops.size());
 3588     while (const SCEVConstant *RHSC = dyn_cast<SCEVConstant>(Ops[Idx])) {
 3592       Ops[0] = getConstant(Fold);
 3593       Ops.erase(Ops.begin()+1);  // Erase the folded element
 3594       if (Ops.size() == 1) return Ops[0];
 3595       LHSC = cast<SCEVConstant>(Ops[0]);
 3603       Ops.erase(Ops.begin());
 3611     if (Ops.size() == 1) return Ops[0];
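
Lines 3573-3611 fold constants. Thanks to the canonical order, all
SCEVConstants sit at the front of Ops, so the loop repeatedly combines
Ops[0] with the next constant and erases the folded element; a leftover
constant that is the identity for the operation (for smax, the minimum
signed value) is dropped on line 3603. A minimal sketch of the pairwise
fold, assuming Kind is smax and the surrounding ScalarEvolution member
context:

    // Sketch: fold leading constants pairwise; APIntOps::smax returns
    // the larger of two APInts under signed comparison.
    while (Ops.size() > 1 && isa<SCEVConstant>(Ops[0]) &&
           isa<SCEVConstant>(Ops[1])) {
      const APInt &L = cast<SCEVConstant>(Ops[0])->getAPInt();
      const APInt &R = cast<SCEVConstant>(Ops[1])->getAPInt();
      Ops[0] = getConstant(APIntOps::smax(L, R));
      Ops.erase(Ops.begin() + 1); // erase the folded element
    }
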
 3615   while (Idx < Ops.size() && Ops[Idx]->getSCEVType() < Kind)
 3620   if (Idx < Ops.size()) {
 3622     while (Ops[Idx]->getSCEVType() == Kind) {
 3623       const SCEVMinMaxExpr *SMME = cast<SCEVMinMaxExpr>(Ops[Idx]);
 3624       Ops.erase(Ops.begin()+Idx);
 3625       Ops.append(SMME->op_begin(), SMME->op_end());
 3630       return getMinMaxExpr(Kind, Ops);
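
Lines 3615-3630 flatten nesting. The loop on 3615 advances Idx past operands
whose SCEV kind sorts before Kind; when an operand of the same kind is
found, it is erased and its own operands are spliced in, then getMinMaxExpr
recurses so the enlarged list is re-canonicalized. By associativity,
smax(x, smax(a, b)) becomes smax(x, a, b). A sketch of the splice, assuming
the member context:

    // Sketch: inline a nested min/max of the same kind and restart.
    if (const auto *SMME = dyn_cast<SCEVMinMaxExpr>(Ops[Idx]))
      if (SMME->getSCEVType() == Kind) {
        Ops.erase(Ops.begin() + Idx);                 // drop the nested expr
        Ops.append(SMME->op_begin(), SMME->op_end()); // splice its operands
        return getMinMaxExpr(Kind, Ops);              // re-sort, re-simplify
      }
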
 3642   for (unsigned i = 0, e = Ops.size() - 1; i != e; ++i) {
 3643     if (Ops[i] == Ops[i + 1] ||
 3644         isKnownViaNonRecursiveReasoning(FirstPred, Ops[i], Ops[i + 1])) {
 3647       Ops.erase(Ops.begin() + i + 1, Ops.begin() + i + 2);
 3650     } else if (isKnownViaNonRecursiveReasoning(SecondPred, Ops[i],
 3651                                                Ops[i + 1])) {
 3653       Ops.erase(Ops.begin() + i, Ops.begin() + i + 1);
 3659   if (Ops.size() == 1) return Ops[0];
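
Lines 3642-3659 prune redundant operands. Canonical ordering makes
duplicates adjacent, so one pass over neighboring pairs suffices: if Ops[i]
equals Ops[i + 1], or FirstPred proves the left element always dominates
(for smax, Ops[i] >= Ops[i + 1]), the right element is erased; if SecondPred
proves the opposite, the left element is erased instead. A standalone sketch
of the pass for the signed-max case (the concrete predicates here are
assumptions; the real code derives them from Kind):

    // Sketch, assuming the ScalarEvolution member context and Kind == smax.
    for (unsigned i = 0; i + 1 < Ops.size();) {
      if (Ops[i] == Ops[i + 1] ||
          isKnownViaNonRecursiveReasoning(ICmpInst::ICMP_SGE, Ops[i],
                                          Ops[i + 1]))
        Ops.erase(Ops.begin() + i + 1); // right operand can never win
      else if (isKnownViaNonRecursiveReasoning(ICmpInst::ICMP_SLE, Ops[i],
                                               Ops[i + 1]))
        Ops.erase(Ops.begin() + i);     // left operand can never win
      else
        ++i;
    }
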
 3661   assert(!Ops.empty() && "Reduced smax down to nothing!");
 3668   std::tie(ExistingSCEV, ID, IP) = findExistingSCEVInCache(Kind, Ops);
 3671   const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size());
 3672   std::uninitialized_copy(Ops.begin(), Ops.end(), O);
 3674       ID.Intern(SCEVAllocator), static_cast<SCEVTypes>(Kind), O, Ops.size());
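
Lines 3661-3674 intern the result. The cache is probed a second time on line
3668 because flattening and pruning may have reduced Ops to an operand list
that already exists; on a miss, the surviving operands are copied into
allocator-owned storage (the SmallVector is a temporary, so the node needs a
stable array) and a SCEVMinMaxExpr is built over it. A hedged sketch of the
tail, assuming the member context:

    // Sketch of the uniquing tail (UniqueSCEVs and SCEVAllocator are
    // ScalarEvolution members).
    const SCEV *ExistingSCEV;
    FoldingSetNodeID ID;
    void *IP;
    std::tie(ExistingSCEV, ID, IP) = findExistingSCEVInCache(Kind, Ops);
    if (ExistingSCEV)
      return ExistingSCEV;
    const SCEV **O = SCEVAllocator.Allocate<const SCEV *>(Ops.size());
    std::uninitialized_copy(Ops.begin(), Ops.end(), O); // stable copy
    SCEV *S = new (SCEVAllocator) SCEVMinMaxExpr(
        ID.Intern(SCEVAllocator), static_cast<SCEVTypes>(Kind), O, Ops.size());
    UniqueSCEVs.InsertNode(S, IP); // later lookups will hit the cache
    return S;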