[PP] Replace some index-based for loops with range-based ones
While in the area, also change some unsigned variables to size_t, and use LLVM_FALLTHROUGH in place of a comment stating that the fall-through is intentional.

Differential Revision: http://reviews.llvm.org/D25982

llvm-svn: 285193
parent 3f515cd795
commit e4fd6522c1
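For readers skimming the diff below, here is a rough standalone illustration of the three mechanical changes, in plain C++17 with made-up names (demo, Toks, Kind); [[fallthrough]] stands in for LLVM_FALLTHROUGH, which is not available outside the LLVM tree:

#include <cstddef>
#include <cstdio>
#include <string>
#include <vector>

void demo(const std::vector<std::string> &Toks, int Kind) {
  // Before: index-based loop with an 'unsigned' index, which can silently
  // truncate the 64-bit size() on LP64 targets.
  //   for (unsigned i = 0, e = Toks.size(); i != e; ++i)
  //     std::puts(Toks[i].c_str());
  // After: range-based loop; no index variable at all.
  for (const std::string &Tok : Toks)
    std::puts(Tok.c_str());

  // Before: unsigned Start = Toks.size();
  // After: size_t matches the container's size_type.
  size_t Start = Toks.size();
  std::printf("start index: %zu\n", Start);

  switch (Kind) {
  case 0:
    std::puts("kind 0");
    // Before: a "// FALL THROUGH." comment only; the compiler cannot check it.
    // After: an explicit marker (LLVM_FALLTHROUGH in LLVM, [[fallthrough]] in
    // standard C++), so fall-through warnings treat it as intentional.
    [[fallthrough]];
  case 1:
    std::puts("kind 0 or 1");
    break;
  default:
    break;
  }
}

int main() {
  demo({"a", "b", "c"}, 0);
  return 0;
}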
@@ -86,7 +86,7 @@ void Preprocessor::EnterCachingLexMode() {
 const Token &Preprocessor::PeekAhead(unsigned N) {
   assert(CachedLexPos + N > CachedTokens.size() && "Confused caching.");
   ExitCachingLexMode();
-  for (unsigned C = CachedLexPos + N - CachedTokens.size(); C > 0; --C) {
+  for (size_t C = CachedLexPos + N - CachedTokens.size(); C > 0; --C) {
     CachedTokens.push_back(Token());
     Lex(CachedTokens.back());
   }
@@ -40,10 +40,10 @@ bool Preprocessor::isInPrimaryFile() const {
   // If there are any stacked lexers, we're in a #include.
   assert(IsFileLexer(IncludeMacroStack[0]) &&
          "Top level include stack isn't our primary lexer?");
-  for (unsigned i = 1, e = IncludeMacroStack.size(); i != e; ++i)
-    if (IsFileLexer(IncludeMacroStack[i]))
-      return false;
-  return true;
+  return std::none_of(IncludeMacroStack.begin() + 1, IncludeMacroStack.end(),
+                      [](const IncludeStackInfo &ISI) -> bool {
+                        return IsFileLexer(ISI);
+                      });
 }
 
 /// getCurrentLexer - Return the current file lexer being lexed from. Note
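The std::none_of form above asks the same question as the removed early-return loop: is there any stacked file lexer after the primary one? A minimal equivalence sketch with a plain std::vector<int> and a hypothetical isFileLexer predicate (both functions assume the stack is non-empty, mirroring the assert above):

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <vector>

static bool isFileLexer(int Entry) { return Entry == 1; } // stand-in predicate

// Old shape: early-return loop starting at the second element.
static bool noneAfterFirst_loop(const std::vector<int> &Stack) {
  for (size_t i = 1, e = Stack.size(); i != e; ++i)
    if (isFileLexer(Stack[i]))
      return false;
  return true;
}

// New shape: the same question asked as an algorithm call.
static bool noneAfterFirst_algo(const std::vector<int> &Stack) {
  return std::none_of(Stack.begin() + 1, Stack.end(),
                      [](int Entry) { return isFileLexer(Entry); });
}

int main() {
  std::vector<int> Stack = {1, 0, 0, 2};
  assert(noneAfterFirst_loop(Stack) == noneAfterFirst_algo(Stack));
  return 0;
}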
@@ -54,8 +54,7 @@ PreprocessorLexer *Preprocessor::getCurrentFileLexer() const {
     return CurPPLexer;
 
   // Look for a stacked lexer.
-  for (unsigned i = IncludeMacroStack.size(); i != 0; --i) {
-    const IncludeStackInfo& ISI = IncludeMacroStack[i-1];
+  for (const IncludeStackInfo &ISI : llvm::reverse(IncludeMacroStack)) {
     if (IsFileLexer(ISI))
       return ISI.ThePPLexer;
   }
@@ -566,8 +565,7 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
   // explicit EOD token.
   PreprocessorLexer *FoundLexer = nullptr;
   bool LexerWasInPPMode = false;
-  for (unsigned i = 0, e = IncludeMacroStack.size(); i != e; ++i) {
-    IncludeStackInfo &ISI = *(IncludeMacroStack.end()-i-1);
+  for (const IncludeStackInfo &ISI : llvm::reverse(IncludeMacroStack)) {
     if (ISI.ThePPLexer == nullptr) continue; // Scan for a real lexer.
 
     // Once we find a real lexer, mark it as raw mode (disabling macro
@@ -411,8 +411,7 @@ bool Preprocessor::isNextPPTokenLParen() {
   // macro stack.
   if (CurPPLexer)
     return false;
-  for (unsigned i = IncludeMacroStack.size(); i != 0; --i) {
-    IncludeStackInfo &Entry = IncludeMacroStack[i-1];
+  for (const IncludeStackInfo &Entry : llvm::reverse(IncludeMacroStack)) {
     if (Entry.TheLexer)
      Val = Entry.TheLexer->isNextPPTokenLParen();
     else if (Entry.ThePTHLexer)
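llvm::reverse, used in the three hunks above, is a small range adaptor from llvm/ADT/STLExtras.h that lets a range-based for walk a container back to front. A rough standard-C++ stand-in (the ReversedRange/reversed names here are hypothetical) showing the old and new iteration shapes:

#include <cstdio>
#include <iterator>
#include <vector>

// Minimal stand-in for llvm::reverse: adapts a container so that a
// range-based for loop iterates it in reverse order.
template <typename Range> struct ReversedRange {
  Range &R;
  auto begin() { return std::rbegin(R); }
  auto end() { return std::rend(R); }
};
template <typename Range> ReversedRange<Range> reversed(Range &R) {
  return {R};
}

int main() {
  std::vector<int> Stack = {1, 2, 3};
  // Old shape: for (unsigned i = Stack.size(); i != 0; --i) use Stack[i-1];
  // New shape: iterate the adapted range, newest entry first.
  for (int Entry : reversed(Stack))
    std::printf("%d\n", Entry); // prints 3 2 1
  return 0;
}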
@@ -501,9 +500,7 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier,
     } else {
       Callbacks->MacroExpands(Identifier, M, ExpansionRange, Args);
       if (!DelayedMacroExpandsCallbacks.empty()) {
-        for (unsigned i = 0, e = DelayedMacroExpandsCallbacks.size(); i != e;
-             ++i) {
-          MacroExpandsInfo &Info = DelayedMacroExpandsCallbacks[i];
+        for (const MacroExpandsInfo &Info : DelayedMacroExpandsCallbacks) {
           // FIXME: We lose macro args info with delayed callback.
           Callbacks->MacroExpands(Info.Tok, Info.MD, Info.Range,
                                   /*Args=*/nullptr);
@@ -757,7 +754,7 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
     assert(Tok.isOneOf(tok::l_paren, tok::comma) &&
            "only expect argument separators here");
 
-    unsigned ArgTokenStart = ArgTokens.size();
+    size_t ArgTokenStart = ArgTokens.size();
     SourceLocation ArgStartLoc = Tok.getLocation();
 
     // C99 6.10.3p11: Keep track of the number of l_parens we have seen. Note
@@ -1009,10 +1006,10 @@ Token *Preprocessor::cacheMacroExpandedTokens(TokenLexer *tokLexer,
   if (cacheNeedsToGrow) {
     // Go through all the TokenLexers whose 'Tokens' pointer points in the
     // buffer and update the pointers to the (potential) new buffer array.
-    for (unsigned i = 0, e = MacroExpandingLexersStack.size(); i != e; ++i) {
+    for (const auto &Lexer : MacroExpandingLexersStack) {
       TokenLexer *prevLexer;
       size_t tokIndex;
-      std::tie(prevLexer, tokIndex) = MacroExpandingLexersStack[i];
+      std::tie(prevLexer, tokIndex) = Lexer;
       prevLexer->Tokens = MacroExpandedTokens.data() + tokIndex;
     }
   }
@@ -281,7 +281,7 @@ void Preprocessor::Handle_Pragma(Token &Tok) {
 
   // Remove escaped quotes and escapes.
   unsigned ResultPos = 1;
-  for (unsigned i = 1, e = StrVal.size() - 1; i != e; ++i) {
+  for (size_t i = 1, e = StrVal.size() - 1; i != e; ++i) {
     // Skip escapes. \\ -> '\' and \" -> '"'.
     if (StrVal[i] == '\\' && i + 1 < e &&
         (StrVal[i + 1] == '\\' || StrVal[i + 1] == '"'))
@@ -232,7 +232,7 @@ bool TokenConcatenation::AvoidConcat(const Token &PrevPrevTok,
     // it as an identifier.
     if (!PrevTok.hasUDSuffix())
       return false;
-    // FALL THROUGH.
+    LLVM_FALLTHROUGH;
   case tok::identifier: // id+id or id+number or id+L"foo".
     // id+'.'... will not append.
     if (Tok.is(tok::numeric_constant))
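LLVM_FALLTHROUGH (from llvm/Support/Compiler.h) marks the fall-through explicitly, so fall-through diagnostics can treat it as intentional instead of relying on a comment. A simplified sketch of how such a macro can be defined and used; MY_FALLTHROUGH is a made-up name and this is not the exact LLVM definition:

// Simplified sketch of a fall-through marker macro; the real LLVM_FALLTHROUGH
// lives in llvm/Support/Compiler.h and checks compiler-specific spellings too.
#if defined(__has_cpp_attribute)
#  if __has_cpp_attribute(fallthrough)
#    define MY_FALLTHROUGH [[fallthrough]]
#  endif
#endif
#ifndef MY_FALLTHROUGH
#  define MY_FALLTHROUGH (void)0 // no-op where no attribute is available
#endif

#include <cstdio>

void classify(int C) {
  switch (C) {
  case '_':
    std::puts("underscore");
    MY_FALLTHROUGH; // intentional: '_' is also an identifier character
  case 'a':
    std::puts("identifier character");
    break;
  default:
    std::puts("something else");
    break;
  }
}

int main() {
  classify('_'); // prints both lines, with no fall-through warning
  return 0;
}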
@@ -275,7 +275,7 @@ void TokenLexer::ExpandFunctionArguments() {
 
       // If the arg token expanded into anything, append it.
       if (ResultArgToks->isNot(tok::eof)) {
-        unsigned FirstResult = ResultToks.size();
+        size_t FirstResult = ResultToks.size();
         unsigned NumToks = MacroArgs::getArgLength(ResultArgToks);
         ResultToks.append(ResultArgToks, ResultArgToks+NumToks);
 
@@ -289,8 +289,8 @@ void TokenLexer::ExpandFunctionArguments() {
 
         // If the '##' came from expanding an argument, turn it into 'unknown'
         // to avoid pasting.
-        for (unsigned i = FirstResult, e = ResultToks.size(); i != e; ++i) {
-          Token &Tok = ResultToks[i];
+        for (Token &Tok : llvm::make_range(ResultToks.begin() + FirstResult,
+                                           ResultToks.end())) {
           if (Tok.is(tok::hashhash))
             Tok.setKind(tok::unknown);
         }
@@ -333,9 +333,8 @@ void TokenLexer::ExpandFunctionArguments() {
 
       // If the '##' came from expanding an argument, turn it into 'unknown'
      // to avoid pasting.
-      for (unsigned i = ResultToks.size() - NumToks, e = ResultToks.size();
-           i != e; ++i) {
-        Token &Tok = ResultToks[i];
+      for (Token &Tok : llvm::make_range(ResultToks.end() - NumToks,
+                                         ResultToks.end())) {
         if (Tok.is(tok::hashhash))
           Tok.setKind(tok::unknown);
       }