reference, declaration, definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References

lib/Target/X86/X86ISelDAGToDAG.cpp
 1850     AM.dump(CurDAG);
 1854     return matchAddressBase(N, AM);
 1859   if (AM.isRIPRelative()) {
 1863     if (!(AM.ES || AM.MCSym) && AM.JT != -1)
 1863     if (!(AM.ES || AM.MCSym) && AM.JT != -1)
 1863     if (!(AM.ES || AM.MCSym) && AM.JT != -1)
 1867       if (!foldOffsetIntoAddress(Cst->getSExtValue(), AM))
 1875     if (!AM.hasSymbolicDisplacement() && AM.Disp == 0)
 1875     if (!AM.hasSymbolicDisplacement() && AM.Disp == 0)
 1878         AM.MCSym = ESNode->getMCSymbol();
 1885     if (!foldOffsetIntoAddress(Val, AM))
 1892     if (!matchWrapper(N, AM))
 1897     if (!matchLoadInAddress(cast<LoadSDNode>(N), AM))
 1902     if (AM.BaseType == X86ISelAddressMode::RegBase &&
 1903         AM.Base_Reg.getNode() == nullptr &&
 1904         (!Subtarget->is64Bit() || isDispSafeForFrameIndex(AM.Disp))) {
 1905       AM.BaseType = X86ISelAddressMode::FrameIndexBase;
 1906       AM.Base_FrameIndex = cast<FrameIndexSDNode>(N)->getIndex();
 1912     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1)
 1912     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1)
 1922         AM.Scale = 1 << Val;
 1929           AM.IndexReg = ShVal.getOperand(0);
 1932           if (!foldOffsetIntoAddress(Disp, AM))
 1936         AM.IndexReg = ShVal;
 1944     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1) break;
 1944     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1) break;
 1965     if (!foldMaskAndShiftToScale(*CurDAG, N, Mask, N, X, AM))
 1978     if (AM.BaseType == X86ISelAddressMode::RegBase &&
 1979         AM.Base_Reg.getNode() == nullptr &&
 1980         AM.IndexReg.getNode() == nullptr) {
 1984           AM.Scale = unsigned(CN->getZExtValue())-1;
 1998             if (foldOffsetIntoAddress(Disp, AM))
 2004           AM.IndexReg = AM.Base_Reg = Reg;
 2004           AM.IndexReg = AM.Base_Reg = Reg;
 2023     X86ISelAddressMode Backup = AM;
 2024     if (matchAddressRecursively(N.getOperand(0), AM, Depth+1)) {
 2026       AM = Backup;
 2031     if (AM.IndexReg.getNode() || AM.isRIPRelative()) {
 2031     if (AM.IndexReg.getNode() || AM.isRIPRelative()) {
 2032       AM = Backup;
 2050     if ((AM.BaseType == X86ISelAddressMode::RegBase && AM.Base_Reg.getNode() &&
 2050     if ((AM.BaseType == X86ISelAddressMode::RegBase && AM.Base_Reg.getNode() &&
 2051          !AM.Base_Reg.getNode()->hasOneUse()) ||
 2052         AM.BaseType == X86ISelAddressMode::FrameIndexBase)
 2056     if ((AM.hasSymbolicDisplacement() && !Backup.hasSymbolicDisplacement()) +
 2057         ((AM.Disp != 0) && (Backup.Disp == 0)) +
 2058         (AM.Segment.getNode() && !Backup.Segment.getNode()) >= 2)
 2062       AM = Backup;
 2069     AM.IndexReg = RHS;
 2070     AM.NegateIndex = true;
 2071     AM.Scale = 1;
 2076     if (!matchAdd(N, AM, Depth))
 2088         !matchAdd(N, AM, Depth))
 2097     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1) break;
 2097     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1) break;
 2114       if (!foldMaskAndShiftToExtract(*CurDAG, N, Mask, Shift, X, AM))
 2118       if (!foldMaskAndShiftToScale(*CurDAG, N, Mask, Shift, X, AM))
 2122       if (!foldMaskedShiftToBEXTR(*CurDAG, N, Mask, Shift, X, AM, *Subtarget))
 2128     if (!foldMaskedShiftToScaledMask(*CurDAG, N, AM))
 2136     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1)
 2136     if (AM.IndexReg.getNode() != nullptr || AM.Scale != 1)
 2161     AM.Scale = 1 << ShAmtC->getZExtValue();
 2162     AM.IndexReg = Zext;
 2172   return matchAddressBase(N, AM);