References

lib/Target/AMDGPU/SIISelLowering.cpp
 6160     switch (DAG.getMachineFunction().getFunction().getCallingConv()) {
 6190       DAG.getTargetConstant(Offset, DL, MVT::i16),
 6191       copyToM0(DAG, Chain, DL, M0).getValue(1), // Glue
 6193     return DAG.getMemIntrinsicNode(AMDGPUISD::DS_ORDERED_COUNT, DL,
 6206     return DAG.getAtomic(Opc, SDLoc(Op), M->getMemoryVT(),
 6238     return DAG.getMemIntrinsicNode(Opc, SDLoc(Op), M->getVTList(), Ops,
 6255       DAG.getTargetConstant(Glc | (Slc << 1), DL, MVT::i32), // cachepolicy
 6256       DAG.getTargetConstant(IdxEn, DL, MVT::i1), // idxen
 6259     unsigned Offset = setBufferOffsets(Op.getOperand(4), DAG, &Ops[3]);
 6275                                  M, DAG, Ops);
 6280       return handleByteShortBufferLoads(DAG, LoadVT, DL, Ops, M);
 6283                                M->getMemOperand(), DAG);
 6289     auto Offsets = splitBufferOffsets(Op.getOperand(3), DAG);
 6293       DAG.getConstant(0, DL, MVT::i32), // vindex
 6298       DAG.getTargetConstant(0, DL, MVT::i1), // idxen
 6303     return lowerIntrinsicLoad(M, IsFormat, DAG, Ops);
 6309     auto Offsets = splitBufferOffsets(Op.getOperand(4), DAG);
 6318       DAG.getTargetConstant(1, DL, MVT::i1), // idxen
 6324     return lowerIntrinsicLoad(cast<MemSDNode>(Op), IsFormat, DAG, Ops);
 6344       DAG.getTargetConstant(Dfmt | (Nfmt << 4), DL, MVT::i32), // format
 6345       DAG.getTargetConstant(Glc | (Slc << 1), DL, MVT::i32), // cachepolicy
 6346       DAG.getTargetConstant(IdxEn, DL, MVT::i1) // idxen
 6351                                  M, DAG, Ops);
 6354                                DAG);
 6359     auto Offsets = splitBufferOffsets(Op.getOperand(3), DAG);
 6364       DAG.getConstant(0, DL, MVT::i32), // vindex
 6370       DAG.getTargetConstant(0, DL, MVT::i1), // idxen
 6375                                  M, DAG, Ops);
 6378                                DAG);
 6383     auto Offsets = splitBufferOffsets(Op.getOperand(4), DAG);
 6394       DAG.getTargetConstant(1, DL, MVT::i1), // idxen
 6399                                  M, DAG, Ops);
 6402                                DAG);
 6426       DAG.getTargetConstant(Slc << 1, DL, MVT::i32), // cachepolicy
 6427       DAG.getTargetConstant(IdxEn, DL, MVT::i1), // idxen
 6429     unsigned Offset = setBufferOffsets(Op.getOperand(5), DAG, &Ops[4]);
 6474     return DAG.getMemIntrinsicNode(Opcode, DL, Op->getVTList(), Ops, VT,
 6489     auto Offsets = splitBufferOffsets(Op.getOperand(4), DAG);
 6494       DAG.getConstant(0, DL, MVT::i32), // vindex
 6499       DAG.getTargetConstant(0, DL, MVT::i1), // idxen
 6548     return DAG.getMemIntrinsicNode(Opcode, DL, Op->getVTList(), Ops, VT,
 6563     auto Offsets = splitBufferOffsets(Op.getOperand(5), DAG);
 6573       DAG.getTargetConstant(1, DL, MVT::i1), // idxen
 6623     return DAG.getMemIntrinsicNode(Opcode, DL, Op->getVTList(), Ops, VT,
 6640       DAG.getTargetConstant(Slc << 1, DL, MVT::i32), // cachepolicy
 6641       DAG.getTargetConstant(IdxEn, DL, MVT::i1), // idxen
 6643     unsigned Offset = setBufferOffsets(Op.getOperand(6), DAG, &Ops[5]);
 6651     return DAG.getMemIntrinsicNode(AMDGPUISD::BUFFER_ATOMIC_CMPSWAP, DL,
 6655     auto Offsets = splitBufferOffsets(Op.getOperand(5), DAG);
 6661       DAG.getConstant(0, DL, MVT::i32), // vindex
 6666       DAG.getTargetConstant(0, DL, MVT::i1), // idxen
 6672     return DAG.getMemIntrinsicNode(AMDGPUISD::BUFFER_ATOMIC_CMPSWAP, DL,
 6676     auto Offsets = splitBufferOffsets(Op.getOperand(6), DAG);
 6687       DAG.getTargetConstant(1, DL, MVT::i1), // idxen
 6694     return DAG.getMemIntrinsicNode(AMDGPUISD::BUFFER_ATOMIC_CMPSWAP, DL,
 6701       return lowerImage(Op, ImageDimIntr, DAG);
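
The references above share a common shape: the intrinsic's operands are packed into an Ops array, immediate fields such as the cache policy and the idxen flag are materialized with DAG.getTargetConstant, and the result is emitted as a target memory-intrinsic node so its MachineMemOperand is preserved. The sketch below is only an illustration of that pattern, not the exact code at the lines listed; the opcode, operand order, and constant values are assumptions, and the real lowering additionally routes offsets through helpers like setBufferOffsets/splitBufferOffsets.

```cpp
// Illustrative sketch only: mirrors the operand-packing pattern visible in the
// SIISelLowering.cpp references above. Opcode, operand order, and the zeroed
// immediates are assumptions for illustration.
#include "AMDGPUISelLowering.h"             // for AMDGPUISD::BUFFER_LOAD_FORMAT (assumed reachable)
#include "llvm/CodeGen/SelectionDAG.h"
#include "llvm/CodeGen/SelectionDAGNodes.h"

using namespace llvm;

static SDValue lowerBufferLoadSketch(SDValue Op, SelectionDAG &DAG) {
  SDLoc DL(Op);
  // The intrinsic node carries a MachineMemOperand describing the memory access.
  MemSDNode *M = cast<MemSDNode>(Op.getNode());

  SDValue Ops[] = {
      Op.getOperand(0),                       // chain
      Op.getOperand(2),                       // rsrc (buffer resource descriptor)
      DAG.getConstant(0, DL, MVT::i32),       // vindex
      Op.getOperand(3),                       // voffset
      Op.getOperand(4),                       // soffset
      DAG.getTargetConstant(0, DL, MVT::i32), // offset (imm)
      DAG.getTargetConstant(0, DL, MVT::i32), // cachepolicy
      DAG.getTargetConstant(0, DL, MVT::i1),  // idxen
  };

  // Emit a target memory-intrinsic node, reusing the original node's memory VT
  // and MachineMemOperand, as the referenced lines do.
  return DAG.getMemIntrinsicNode(AMDGPUISD::BUFFER_LOAD_FORMAT, DL,
                                 DAG.getVTList(Op.getValueType(), MVT::Other),
                                 Ops, M->getMemoryVT(), M->getMemOperand());
}
```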