//===- TokenRewriter.cpp - Token-based code rewriting interface -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file implements the TokenRewriter class, which is used for code
//  transformations.
//
//===----------------------------------------------------------------------===//
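//
// Illustrative usage sketch, assuming an existing SourceManager SM and
// LangOptions LangOpts, plus the token_begin()/token_end() iterators
// declared in TokenRewriter.h:
//
//   TokenRewriter Rewriter(SM.getMainFileID(), SM, LangOpts);
//   for (TokenRewriter::token_iterator I = Rewriter.token_begin(),
//                                      E = Rewriter.token_end();
//        I != E; ++I)
//     if (I->is(tok::semi))
//       Rewriter.AddTokenBefore(I, " /* before each ';' */ ");
//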

#include "clang/Rewrite/Core/TokenRewriter.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/ScratchBuffer.h"
#include "clang/Lex/Token.h"
#include <cassert>
#include <cstring>
#include <map>
#include <utility>

using namespace clang;

TokenRewriter::TokenRewriter(FileID FID, SourceManager &SM,
                             const LangOptions &LangOpts) {
  ScratchBuf.reset(new ScratchBuffer(SM));

  // Create a lexer to lex all the tokens of the main file in raw mode.
  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
  Lexer RawLex(FID, FromFile, SM, LangOpts);

  // Return all comments and whitespace as tokens.
  RawLex.SetKeepWhitespaceMode(true);

  // Lex the file, populating our data structures.
  Token RawTok;
  RawLex.LexFromRawLexer(RawTok);
  while (RawTok.isNot(tok::eof)) {
#if 0
    if (Tok.is(tok::raw_identifier)) {
      // Look up the identifier info for the token.  This should use
      // IdentifierTable directly instead of PP.
      PP.LookUpIdentifierInfo(Tok);
    }
#endif

    AddToken(RawTok, TokenList.end());
    RawLex.LexFromRawLexer(RawTok);
  }
}

TokenRewriter::~TokenRewriter() = default;

/// RemapIterator - Convert from token_iterator (a const iterator) to
/// TokenRefTy (a non-const iterator).
TokenRewriter::TokenRefTy TokenRewriter::RemapIterator(token_iterator I) {
  if (I == token_end()) return TokenList.end();

  // FIXME: This is horrible, we should use our own list or something to avoid
  // this.
  std::map<SourceLocation, TokenRefTy>::iterator MapIt =
    TokenAtLoc.find(I->getLocation());
  assert(MapIt != TokenAtLoc.end() && "iterator not in rewriter?");
  return MapIt->second;
}

/// AddToken - Add the specified token into the Rewriter before the other
/// position.
TokenRewriter::TokenRefTy
TokenRewriter::AddToken(const Token &T, TokenRefTy Where) {
  Where = TokenList.insert(Where, T);

  bool InsertSuccess = TokenAtLoc.insert(std::make_pair(T.getLocation(),
                                                        Where)).second;
  assert(InsertSuccess && "Token location already in rewriter!");
  (void)InsertSuccess;
  return Where;
}

TokenRewriter::token_iterator
TokenRewriter::AddTokenBefore(token_iterator I, const char *Val) {
  unsigned Len = strlen(Val);

  // Plop the string into the scratch buffer, then create a token for this
  // string.
  Token Tok;
  Tok.startToken();
  const char *Spelling;
  Tok.setLocation(ScratchBuf->getToken(Val, Len, Spelling));
  Tok.setLength(Len);

  // TODO: Form a whole lexer around this and relex the token!  For now, just
  // set kind to tok::unknown.
  Tok.setKind(tok::unknown);
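  // A minimal sketch of that TODO, assuming the LangOptions passed to the
  // constructor were stashed in a member (they currently are not): relex the
  // spelling sitting in the scratch buffer with a raw Lexer so the token gets
  // a real kind, e.g.
  //
  //   Lexer RawLex(Tok.getLocation(), LangOpts, Spelling, Spelling,
  //                Spelling + Len);
  //   RawLex.LexFromRawLexer(Tok);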

  return AddToken(Tok, RemapIterator(I));
}