parent f3a5a5c546
commit d2d442ca73
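The diff below (commit d2d442ca73, parent f3a5a5c546) converts clang's lexer and preprocessor code (HeaderMap, HeaderSearch, Lexer, ModuleMap, Preprocessor, PTH) from returning and comparing against 0/NULL to the C++11 nullptr keyword; the surrounding logic is unchanged. A minimal standalone sketch of the idiom being applied, using an illustrative Entry/find helper rather than code from the patch:

    #include <cstddef>

    struct Entry { int Key; };

    // Before this kind of change a miss was reported as 'return 0;'. nullptr
    // denotes the same null value but has type std::nullptr_t, so it cannot be
    // confused with an integer and reads unambiguously as "no pointer".
    static Entry *find(Entry *Table, std::size_t N, int Key) {
      for (std::size_t I = 0; I != N; ++I)
        if (Table[I].Key == Key)
          return &Table[I];
      return nullptr; // previously: return 0;
    }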
@@ -79,10 +79,10 @@ static inline unsigned HashHMapKey(StringRef Str) {
 const HeaderMap *HeaderMap::Create(const FileEntry *FE, FileManager &FM) {
   // If the file is too small to be a header map, ignore it.
   unsigned FileSize = FE->getSize();
-  if (FileSize <= sizeof(HMapHeader)) return 0;
+  if (FileSize <= sizeof(HMapHeader)) return nullptr;
 
   std::unique_ptr<const llvm::MemoryBuffer> FileBuffer(FM.getBufferForFile(FE));
-  if (!FileBuffer) return 0; // Unreadable file?
+  if (!FileBuffer) return nullptr; // Unreadable file?
   const char *FileStart = FileBuffer->getBufferStart();
 
   // We know the file is at least as big as the header, check it now.
@@ -98,9 +98,9 @@ const HeaderMap *HeaderMap::Create(const FileEntry *FE, FileManager &FM) {
            Header->Version == llvm::ByteSwap_16(HMAP_HeaderVersion))
     NeedsByteSwap = true; // Mixed endianness headermap.
   else
-    return 0; // Not a header map.
+    return nullptr; // Not a header map.
 
-  if (Header->Reserved != 0) return 0;
+  if (Header->Reserved != 0) return nullptr;
 
   // Okay, everything looks good, create the header map.
   return new HeaderMap(FileBuffer.release(), NeedsByteSwap);
@@ -165,7 +165,7 @@ const char *HeaderMap::getString(unsigned StrTabIdx) const {
 
   // Check for invalid index.
   if (StrTabIdx >= FileBuffer->getBufferSize())
-    return 0;
+    return nullptr;
 
   // Otherwise, we have a valid pointer into the file. Just return it. We know
   // that the "string" can not overrun the end of the file, because the buffer
@@ -205,7 +205,7 @@ const FileEntry *HeaderMap::LookupFile(
   SmallString<1024> Path;
   StringRef Dest = lookupFilename(Filename, Path);
   if (Dest.empty())
-    return 0;
+    return nullptr;
 
   return FM.getFile(Dest);
 }
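The remaining hunks below follow the same pattern. The change is not purely cosmetic: 0 participates in overload resolution as an int, while nullptr converts only to pointer types. A small sketch, with illustrative overloads that are not part of clang:

    #include <iostream>

    static void report(int n)  { std::cout << "int overload: " << n << "\n"; }
    static void report(int *p) { std::cout << "pointer overload\n"; }

    int main() {
      report(0);        // picks report(int): 0 is an exact match for int
      report(nullptr);  // picks report(int*): nullptr converts only to pointers
      // A conditional such as (flag ? &x : 0) only yields int* because 0 is
      // special-cased as a null pointer constant; nullptr states the intent.
    }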
@@ -37,7 +37,7 @@ HeaderFileInfo::getControllingMacro(ExternalIdentifierLookup *External) {
     return ControllingMacro;
 
   if (!ControllingMacroID || !External)
-    return 0;
+    return nullptr;
 
   ControllingMacro = External->GetIdentifier(ControllingMacroID);
   return ControllingMacro;
@@ -55,8 +55,8 @@ HeaderSearch::HeaderSearch(IntrusiveRefCntPtr<HeaderSearchOptions> HSOpts,
   SystemDirIdx = 0;
   NoCurDirSearch = false;
 
-  ExternalLookup = 0;
-  ExternalSource = 0;
+  ExternalLookup = nullptr;
+  ExternalSource = nullptr;
   NumIncluded = 0;
   NumMultiIncludeFileOptzn = 0;
   NumFrameworkLookups = NumSubFrameworkLookups = 0;
@@ -110,7 +110,7 @@ const HeaderMap *HeaderSearch::CreateHeaderMap(const FileEntry *FE) {
       return HM;
     }
 
-  return 0;
+  return nullptr;
 }
 
 std::string HeaderSearch::getModuleFileName(Module *Module) {
@@ -277,12 +277,12 @@ const FileEntry *DirectoryLookup::LookupFile(
   // Concatenate the requested file onto the directory.
   TmpDir = getDir()->getName();
   llvm::sys::path::append(TmpDir, Filename);
-  if (SearchPath != NULL) {
+  if (SearchPath) {
     StringRef SearchPathRef(getDir()->getName());
     SearchPath->clear();
     SearchPath->append(SearchPathRef.begin(), SearchPathRef.end());
   }
-  if (RelativePath != NULL) {
+  if (RelativePath) {
     RelativePath->clear();
     RelativePath->append(Filename.begin(), Filename.end());
   }
@@ -301,7 +301,7 @@ const FileEntry *DirectoryLookup::LookupFile(
   SmallString<1024> Path;
   StringRef Dest = HM->lookupFilename(Filename, Path);
   if (Dest.empty())
-    return 0;
+    return nullptr;
 
   const FileEntry *Result;
 
@@ -320,12 +320,12 @@ const FileEntry *DirectoryLookup::LookupFile(
   }
 
   if (Result) {
-    if (SearchPath != NULL) {
+    if (SearchPath) {
       StringRef SearchPathRef(getName());
       SearchPath->clear();
       SearchPath->append(SearchPathRef.begin(), SearchPathRef.end());
     }
-    if (RelativePath != NULL) {
+    if (RelativePath) {
      RelativePath->clear();
      RelativePath->append(Filename.begin(), Filename.end());
    }
@@ -397,7 +397,7 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(
 
   // Framework names must have a '/' in the filename.
   size_t SlashPos = Filename.find('/');
-  if (SlashPos == StringRef::npos) return 0;
+  if (SlashPos == StringRef::npos) return nullptr;
 
   // Find out if this is the home for the specified framework, by checking
   // HeaderSearch. Possible answers are yes/no and unknown.
@@ -406,7 +406,7 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(
 
   // If it is known and in some other directory, fail.
   if (CacheEntry.Directory && CacheEntry.Directory != getFrameworkDir())
-    return 0;
+    return nullptr;
 
   // Otherwise, construct the path to this framework dir.
 
@@ -424,12 +424,12 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(
   FrameworkName += ".framework/";
 
   // If the cache entry was unresolved, populate it now.
-  if (CacheEntry.Directory == 0) {
+  if (!CacheEntry.Directory) {
     HS.IncrementFrameworkLookupCount();
 
     // If the framework dir doesn't exist, we fail.
     const DirectoryEntry *Dir = FileMgr.getDirectory(FrameworkName.str());
-    if (Dir == 0) return 0;
+    if (!Dir) return nullptr;
 
     // Otherwise, if it does, remember that this is the right direntry for this
     // framework.
@@ -449,7 +449,7 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(
   // Set the 'user-specified system framework' flag.
   InUserSpecifiedSystemFramework = CacheEntry.IsUserSpecifiedSystemFramework;
 
-  if (RelativePath != NULL) {
+  if (RelativePath) {
     RelativePath->clear();
     RelativePath->append(Filename.begin()+SlashPos+1, Filename.end());
   }
@@ -459,7 +459,7 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(
 
   FrameworkName += "Headers/";
 
-  if (SearchPath != NULL) {
+  if (SearchPath) {
     SearchPath->clear();
     // Without trailing '/'.
     SearchPath->append(FrameworkName.begin(), FrameworkName.end()-1);
@@ -473,7 +473,7 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(
     const char *Private = "Private";
     FrameworkName.insert(FrameworkName.begin()+OrigSize, Private,
                          Private+strlen(Private));
-    if (SearchPath != NULL)
+    if (SearchPath)
       SearchPath->insert(SearchPath->begin()+OrigSize, Private,
                          Private+strlen(Private));
 
@@ -590,14 +590,14 @@ const FileEntry *HeaderSearch::LookupFile(
 
   // If 'Filename' is absolute, check to see if it exists and no searching.
   if (llvm::sys::path::is_absolute(Filename)) {
-    CurDir = 0;
+    CurDir = nullptr;
 
     // If this was an #include_next "/absolute/file", fail.
-    if (FromDir) return 0;
+    if (FromDir) return nullptr;
 
-    if (SearchPath != NULL)
+    if (SearchPath)
       SearchPath->clear();
-    if (RelativePath != NULL) {
+    if (RelativePath) {
      RelativePath->clear();
      RelativePath->append(Filename.begin(), Filename.end());
    }
@@ -606,7 +606,7 @@ const FileEntry *HeaderSearch::LookupFile(
   }
 
   // This is the header that MSVC's header search would have found.
-  const FileEntry *MSFE = 0;
+  const FileEntry *MSFE = nullptr;
   ModuleMap::KnownHeader MSSuggestedModule;
 
   // Unless disabled, check to see if the file is in the #includer's
@@ -651,12 +651,12 @@ const FileEntry *HeaderSearch::LookupFile(
       ToHFI.IndexHeaderMapHeader = IndexHeaderMapHeader;
       ToHFI.Framework = Framework;
 
-      if (SearchPath != NULL) {
+      if (SearchPath) {
         StringRef SearchPathRef(Includer->getDir()->getName());
         SearchPath->clear();
         SearchPath->append(SearchPathRef.begin(), SearchPathRef.end());
       }
-      if (RelativePath != NULL) {
+      if (RelativePath) {
        RelativePath->clear();
        RelativePath->append(Filename.begin(), Filename.end());
      }
@@ -682,7 +682,7 @@ const FileEntry *HeaderSearch::LookupFile(
     }
   }
 
-  CurDir = 0;
+  CurDir = nullptr;
 
   // If this is a system #include, ignore the user #include locs.
   unsigned i = isAngled ? AngledDirIdx : 0;
@@ -806,7 +806,7 @@ const FileEntry *HeaderSearch::LookupFile(
     }
   }
 
-  if (checkMSVCHeaderSearch(Diags, MSFE, 0, IncludeLoc)) {
+  if (checkMSVCHeaderSearch(Diags, MSFE, nullptr, IncludeLoc)) {
     if (SuggestedModule)
       *SuggestedModule = MSSuggestedModule;
     return MSFE;
@@ -814,7 +814,7 @@ const FileEntry *HeaderSearch::LookupFile(
 
   // Otherwise, didn't find it. Remember we didn't find this.
   CacheLookup.HitIdx = SearchDirs.size();
-  return 0;
+  return nullptr;
 }
 
 /// LookupSubframeworkHeader - Look up a subframework for the specified
@@ -833,7 +833,7 @@ LookupSubframeworkHeader(StringRef Filename,
   // Framework names must have a '/' in the filename. Find it.
   // FIXME: Should we permit '\' on Windows?
   size_t SlashPos = Filename.find('/');
-  if (SlashPos == StringRef::npos) return 0;
+  if (SlashPos == StringRef::npos) return nullptr;
 
   // Look up the base framework name of the ContextFileEnt.
   const char *ContextName = ContextFileEnt->getName();
@@ -841,10 +841,10 @@ LookupSubframeworkHeader(StringRef Filename,
   // If the context info wasn't a framework, couldn't be a subframework.
   const unsigned DotFrameworkLen = 10;
   const char *FrameworkPos = strstr(ContextName, ".framework");
-  if (FrameworkPos == 0 ||
+  if (FrameworkPos == nullptr ||
      (FrameworkPos[DotFrameworkLen] != '/' &&
       FrameworkPos[DotFrameworkLen] != '\\'))
-    return 0;
+    return nullptr;
 
   SmallString<1024> FrameworkName(ContextName, FrameworkPos+DotFrameworkLen+1);
 
@@ -861,24 +861,24 @@ LookupSubframeworkHeader(StringRef Filename,
       CacheLookup.getKeyLength() == FrameworkName.size() &&
       memcmp(CacheLookup.getKeyData(), &FrameworkName[0],
              CacheLookup.getKeyLength()) != 0)
-    return 0;
+    return nullptr;
 
   // Cache subframework.
-  if (CacheLookup.getValue().Directory == 0) {
+  if (!CacheLookup.getValue().Directory) {
     ++NumSubFrameworkLookups;
 
     // If the framework dir doesn't exist, we fail.
     const DirectoryEntry *Dir = FileMgr.getDirectory(FrameworkName.str());
-    if (Dir == 0) return 0;
+    if (!Dir) return nullptr;
 
     // Otherwise, if it does, remember that this is the right direntry for this
     // framework.
     CacheLookup.getValue().Directory = Dir;
   }
 
-  const FileEntry *FE = 0;
+  const FileEntry *FE = nullptr;
 
-  if (RelativePath != NULL) {
+  if (RelativePath) {
     RelativePath->clear();
     RelativePath->append(Filename.begin()+SlashPos+1, Filename.end());
   }
@@ -886,7 +886,7 @@ LookupSubframeworkHeader(StringRef Filename,
   // Check ".../Frameworks/HIToolbox.framework/Headers/HIToolbox.h"
   SmallString<1024> HeadersFilename(FrameworkName);
   HeadersFilename += "Headers/";
-  if (SearchPath != NULL) {
+  if (SearchPath) {
     SearchPath->clear();
     // Without trailing '/'.
     SearchPath->append(HeadersFilename.begin(), HeadersFilename.end()-1);
@@ -898,7 +898,7 @@ LookupSubframeworkHeader(StringRef Filename,
     // Check ".../Frameworks/HIToolbox.framework/PrivateHeaders/HIToolbox.h"
     HeadersFilename = FrameworkName;
     HeadersFilename += "PrivateHeaders/";
-    if (SearchPath != NULL) {
+    if (SearchPath) {
       SearchPath->clear();
       // Without trailing '/'.
       SearchPath->append(HeadersFilename.begin(), HeadersFilename.end()-1);
@@ -906,7 +906,7 @@ LookupSubframeworkHeader(StringRef Filename,
 
     HeadersFilename.append(Filename.begin()+SlashPos+1, Filename.end());
     if (!(FE = FileMgr.getFile(HeadersFilename.str(), /*openFile=*/true)))
-      return 0;
+      return nullptr;
   }
 
   // This file is a system header or C++ unfriendly if the old file is.
@@ -1228,15 +1228,15 @@ Module *HeaderSearch::loadFrameworkModule(StringRef Name,
 
   case LMM_AlreadyLoaded:
   case LMM_NoDirectory:
-    return 0;
+    return nullptr;
 
   case LMM_NewlyLoaded:
     return ModMap.findModule(Name);
   }
 
 
   // Try to infer a module map from the framework directory.
-  return ModMap.inferFrameworkModule(Name, Dir, IsSystem, /*Parent=*/0);
+  return ModMap.inferFrameworkModule(Name, Dir, IsSystem, /*Parent=*/nullptr);
 }
 
@@ -379,7 +379,7 @@ unsigned Lexer::getSpelling(const Token &Tok, const char *&Buffer,
                             const LangOptions &LangOpts, bool *Invalid) {
   assert((int)Tok.getLength() >= 0 && "Token character range is bogus!");
 
-  const char *TokStart = 0;
+  const char *TokStart = nullptr;
   // NOTE: this has to be checked *before* testing for an IdentifierInfo.
   if (Tok.is(tok::raw_identifier))
     TokStart = Tok.getRawIdentifier().data();
@@ -395,7 +395,7 @@ unsigned Lexer::getSpelling(const Token &Tok, const char *&Buffer,
   if (Tok.isLiteral())
     TokStart = Tok.getLiteralData();
 
-  if (TokStart == 0) {
+  if (!TokStart) {
     // Compute the start of the token in the input lexer buffer.
     bool CharDataInvalid = false;
     TokStart = SourceMgr.getCharacterData(Tok.getLocation(), &CharDataInvalid);
@@ -1286,7 +1286,7 @@ Slash:
   if (Ptr[0] == '?' && Ptr[1] == '?') {
     // If this is actually a legal trigraph (not something like "??x"), emit
     // a trigraph warning. If so, and if trigraphs are enabled, return it.
-    if (char C = DecodeTrigraphChar(Ptr+2, Tok ? this : 0)) {
+    if (char C = DecodeTrigraphChar(Ptr+2, Tok ? this : nullptr)) {
       // Remember that this token needs to be cleaned.
       if (Tok) Tok->setFlag(Token::NeedsCleaning);
 
@@ -1449,7 +1449,7 @@ static void maybeDiagnoseIDCharCompat(DiagnosticsEngine &Diags, uint32_t C,
 bool Lexer::tryConsumeIdentifierUCN(const char *&CurPtr, unsigned Size,
                                     Token &Result) {
   const char *UCNPtr = CurPtr + Size;
-  uint32_t CodePoint = tryReadUCN(UCNPtr, CurPtr, /*Token=*/0);
+  uint32_t CodePoint = tryReadUCN(UCNPtr, CurPtr, /*Token=*/nullptr);
   if (CodePoint == 0 || !isAllowedIDChar(CodePoint, LangOpts))
     return false;
 
@@ -1732,7 +1732,8 @@ const char *Lexer::LexUDSuffix(Token &Result, const char *CurPtr,
 /// either " or L" or u8" or u" or U".
 bool Lexer::LexStringLiteral(Token &Result, const char *CurPtr,
                              tok::TokenKind Kind) {
-  const char *NulCharacter = 0; // Does this string contain the \0 character?
+  // Does this string contain the \0 character?
+  const char *NulCharacter = nullptr;
 
   if (!isLexingRawMode() &&
       (Kind == tok::utf8_string_literal ||
@@ -1868,7 +1869,8 @@ bool Lexer::LexRawStringLiteral(Token &Result, const char *CurPtr,
 /// LexAngledStringLiteral - Lex the remainder of an angled string literal,
 /// after having lexed the '<' character. This is used for #include filenames.
 bool Lexer::LexAngledStringLiteral(Token &Result, const char *CurPtr) {
-  const char *NulCharacter = 0; // Does this string contain the \0 character?
+  // Does this string contain the \0 character?
+  const char *NulCharacter = nullptr;
   const char *AfterLessPos = CurPtr;
   char C = getAndAdvanceChar(CurPtr, Result);
   while (C != '>') {
@@ -1905,7 +1907,8 @@ bool Lexer::LexAngledStringLiteral(Token &Result, const char *CurPtr) {
 /// lexed either ' or L' or u' or U'.
 bool Lexer::LexCharConstant(Token &Result, const char *CurPtr,
                             tok::TokenKind Kind) {
-  const char *NulCharacter = 0; // Does this character contain the \0 character?
+  // Does this character contain the \0 character?
+  const char *NulCharacter = nullptr;
 
   if (!isLexingRawMode() &&
       (Kind == tok::utf16_char_constant || Kind == tok::utf32_char_constant))
@@ -2606,7 +2609,7 @@ static const char *FindConflictEnd(const char *CurPtr, const char *BufferEnd,
     }
     return RestOfBuffer.data()+Pos;
   }
-  return 0;
+  return nullptr;
 }
 
 /// IsStartOfConflictMarker - If the specified pointer is the start of a version
@@ -2916,7 +2919,7 @@ bool Lexer::LexTokenInternal(Token &Result, bool TokAtPhysicalStartOfLine) {
 LexNextToken:
   // New token, can't need cleaning yet.
   Result.clearFlag(Token::NeedsCleaning);
-  Result.setIdentifierInfo(0);
+  Result.setIdentifierInfo(nullptr);
 
   // CurPtr - Cache BufferPtr in an automatic variable.
   const char *CurPtr = BufferPtr;
@@ -195,7 +195,7 @@ static unsigned ProcessCharEscape(const char *ThisTokBegin,
         << std::string(1, ResultChar);
     break;
   default:
-    if (Diags == 0)
+    if (!Diags)
       break;
 
     if (isPrintable(ResultChar))
@@ -340,7 +340,7 @@ static int MeasureUCNEscape(const char *ThisTokBegin, const char *&ThisTokBuf,
   FullSourceLoc Loc;
 
   if (!ProcessUCNEscape(ThisTokBegin, ThisTokBuf, ThisTokEnd, UcnVal,
-                        UcnLen, Loc, 0, Features, true)) {
+                        UcnLen, Loc, nullptr, Features, true)) {
     HadError = true;
     return 0;
   }
@@ -571,7 +571,7 @@ NumericLiteralParser::NumericLiteralParser(StringRef TokSpelling,
   // Parse the suffix. At this point we can classify whether we have an FP or
   // integer constant.
   bool isFPConstant = isFloatingLiteral();
-  const char *ImaginarySuffixLoc = 0;
+  const char *ImaginarySuffixLoc = nullptr;
 
   // Loop over all of the characters of the suffix. If we see something bad,
   // we break out of the loop.
@@ -1254,7 +1254,7 @@ StringLiteralParser::
 StringLiteralParser(const Token *StringToks, unsigned NumStringToks,
                     Preprocessor &PP, bool Complain)
   : SM(PP.getSourceManager()), Features(PP.getLangOpts()),
-    Target(PP.getTargetInfo()), Diags(Complain ? &PP.getDiagnostics() : 0),
+    Target(PP.getTargetInfo()), Diags(Complain ? &PP.getDiagnostics() :nullptr),
     MaxTokenLength(0), SizeBound(0), CharByteWidth(0), Kind(tok::unknown),
     ResultPtr(ResultBuf.data()), hadError(false), Pascal(false) {
   init(StringToks, NumStringToks);
@@ -27,7 +27,7 @@ MacroArgs *MacroArgs::create(const MacroInfo *MI,
                              bool VarargsElided, Preprocessor &PP) {
   assert(MI->isFunctionLike() &&
          "Can't have args for an object-like macro!");
-  MacroArgs **ResultEnt = 0;
+  MacroArgs **ResultEnt = nullptr;
   unsigned ClosestMatch = ~0U;
 
   // See if we have an entry with a big enough argument list to reuse on the
@@ -46,7 +46,7 @@ MacroArgs *MacroArgs::create(const MacroInfo *MI,
   }
 
   MacroArgs *Result;
-  if (ResultEnt == 0) {
+  if (!ResultEnt) {
     // Allocate memory for a MacroArgs object with the lexer tokens at the end.
     Result = (MacroArgs*)malloc(sizeof(MacroArgs) +
                                 UnexpArgTokens.size() * sizeof(Token));
@@ -17,7 +17,7 @@ using namespace clang;
 
 MacroInfo::MacroInfo(SourceLocation DefLoc)
   : Location(DefLoc),
-    ArgumentList(0),
+    ArgumentList(nullptr),
     NumArguments(0),
     IsDefinitionLengthCached(false),
     IsFunctionLike(false),
@@ -145,7 +145,8 @@ MacroDirective::DefInfo MacroDirective::getDefinition() {
       isPublic = VisMD->isPublic();
   }
 
-  return DefInfo(0, UndefLoc, !isPublic.hasValue() || isPublic.getValue());
+  return DefInfo(nullptr, UndefLoc,
+                 !isPublic.hasValue() || isPublic.getValue());
 }
 
 const MacroDirective::DefInfo
@ -42,7 +42,7 @@ ModuleMap::resolveExport(Module *Mod,
|
|||
// We may have just a wildcard.
|
||||
if (Unresolved.Id.empty()) {
|
||||
assert(Unresolved.Wildcard && "Invalid unresolved export");
|
||||
return Module::ExportDecl(0, true);
|
||||
return Module::ExportDecl(nullptr, true);
|
||||
}
|
||||
|
||||
// Resolve the module-id.
|
||||
|
@ -62,7 +62,7 @@ Module *ModuleMap::resolveModuleId(const ModuleId &Id, Module *Mod,
|
|||
Diags.Report(Id[0].second, diag::err_mmap_missing_module_unqualified)
|
||||
<< Id[0].first << Mod->getFullModuleName();
|
||||
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Dig into the module path.
|
||||
|
@ -74,7 +74,7 @@ Module *ModuleMap::resolveModuleId(const ModuleId &Id, Module *Mod,
|
|||
<< Id[I].first << Context->getFullModuleName()
|
||||
<< SourceRange(Id[0].second, Id[I-1].second);
|
||||
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
Context = Sub;
|
||||
|
@ -87,8 +87,8 @@ ModuleMap::ModuleMap(SourceManager &SourceMgr, DiagnosticsEngine &Diags,
|
|||
const LangOptions &LangOpts, const TargetInfo *Target,
|
||||
HeaderSearch &HeaderInfo)
|
||||
: SourceMgr(SourceMgr), Diags(Diags), LangOpts(LangOpts), Target(Target),
|
||||
HeaderInfo(HeaderInfo), BuiltinIncludeDir(0), CompilingModule(0),
|
||||
SourceModule(0) {}
|
||||
HeaderInfo(HeaderInfo), BuiltinIncludeDir(nullptr),
|
||||
CompilingModule(nullptr), SourceModule(nullptr) {}
|
||||
|
||||
ModuleMap::~ModuleMap() {
|
||||
for (llvm::StringMap<Module *>::iterator I = Modules.begin(),
|
||||
|
@ -247,8 +247,8 @@ void ModuleMap::diagnoseHeaderInclusion(Module *RequestingModule,
|
|||
resolveUses(RequestingModule, /*Complain=*/false);
|
||||
|
||||
bool Excluded = false;
|
||||
Module *Private = NULL;
|
||||
Module *NotUsed = NULL;
|
||||
Module *Private = nullptr;
|
||||
Module *NotUsed = nullptr;
|
||||
|
||||
HeadersMap::iterator Known = findKnownHeader(File);
|
||||
if (Known != Headers.end()) {
|
||||
|
@ -284,14 +284,14 @@ void ModuleMap::diagnoseHeaderInclusion(Module *RequestingModule,
|
|||
}
|
||||
|
||||
// We have found a header, but it is private.
|
||||
if (Private != NULL) {
|
||||
if (Private) {
|
||||
Diags.Report(FilenameLoc, diag::error_use_of_private_header_outside_module)
|
||||
<< Filename;
|
||||
return;
|
||||
}
|
||||
|
||||
// We have found a module, but we don't use it.
|
||||
if (NotUsed != NULL) {
|
||||
if (NotUsed) {
|
||||
Diags.Report(FilenameLoc, diag::error_undeclared_use_of_module)
|
||||
<< RequestingModule->getFullModuleName() << Filename;
|
||||
return;
|
||||
|
@ -387,7 +387,7 @@ ModuleMap::findModuleForHeader(const FileEntry *File,
|
|||
// If inferred submodules export everything they import, add a
|
||||
// wildcard to the set of exports.
|
||||
if (UmbrellaModule->InferExportWildcard && Result->Exports.empty())
|
||||
Result->Exports.push_back(Module::ExportDecl(0, true));
|
||||
Result->Exports.push_back(Module::ExportDecl(nullptr, true));
|
||||
}
|
||||
|
||||
// Infer a submodule with the same name as this header file.
|
||||
|
@ -402,7 +402,7 @@ ModuleMap::findModuleForHeader(const FileEntry *File,
|
|||
// If inferred submodules export everything they import, add a
|
||||
// wildcard to the set of exports.
|
||||
if (UmbrellaModule->InferExportWildcard && Result->Exports.empty())
|
||||
Result->Exports.push_back(Module::ExportDecl(0, true));
|
||||
Result->Exports.push_back(Module::ExportDecl(nullptr, true));
|
||||
} else {
|
||||
// Record each of the directories we stepped through as being part of
|
||||
// the module we found, since the umbrella header covers them all.
|
||||
|
@ -424,7 +424,7 @@ ModuleMap::findModuleForHeader(const FileEntry *File,
|
|||
}
|
||||
|
||||
bool ModuleMap::isHeaderInUnavailableModule(const FileEntry *Header) const {
|
||||
return isHeaderUnavailableInModule(Header, 0);
|
||||
return isHeaderUnavailableInModule(Header, nullptr);
|
||||
}
|
||||
|
||||
bool
|
||||
|
@ -513,8 +513,8 @@ Module *ModuleMap::findModule(StringRef Name) const {
|
|||
llvm::StringMap<Module *>::const_iterator Known = Modules.find(Name);
|
||||
if (Known != Modules.end())
|
||||
return Known->getValue();
|
||||
|
||||
return 0;
|
||||
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
Module *ModuleMap::lookupModuleUnqualified(StringRef Name,
|
||||
|
@ -665,7 +665,7 @@ ModuleMap::inferFrameworkModule(StringRef ModuleName,
|
|||
|
||||
// If we're not allowed to infer a framework module, don't.
|
||||
if (!canInfer)
|
||||
return 0;
|
||||
return nullptr;
|
||||
} else
|
||||
ModuleMapFile = Parent->ModuleMap;
|
||||
|
||||
|
@ -679,8 +679,8 @@ ModuleMap::inferFrameworkModule(StringRef ModuleName,
|
|||
// framework to load *everything*. But, it's not clear that this is a good
|
||||
// idea.
|
||||
if (!UmbrellaHeader)
|
||||
return 0;
|
||||
|
||||
return nullptr;
|
||||
|
||||
Module *Result = new Module(ModuleName, SourceLocation(), Parent, ModuleMapFile,
|
||||
/*IsFramework=*/true, /*IsExplicit=*/false);
|
||||
if (LangOpts.CurrentModule == ModuleName) {
|
||||
|
@ -699,8 +699,8 @@ ModuleMap::inferFrameworkModule(StringRef ModuleName,
|
|||
UmbrellaDirs[UmbrellaHeader->getDir()] = Result;
|
||||
|
||||
// export *
|
||||
Result->Exports.push_back(Module::ExportDecl(0, true));
|
||||
|
||||
Result->Exports.push_back(Module::ExportDecl(nullptr, true));
|
||||
|
||||
// module * { export * }
|
||||
Result->InferSubmodules = true;
|
||||
Result->InferExportWildcard = true;
|
||||
|
@ -787,7 +787,7 @@ void ModuleMap::addHeader(Module *Mod, const FileEntry *Header,
|
|||
const FileEntry *
|
||||
ModuleMap::getContainingModuleMapFile(Module *Module) const {
|
||||
if (Module->DefinitionLoc.isInvalid())
|
||||
return 0;
|
||||
return nullptr;
|
||||
|
||||
return SourceMgr.getFileEntryForID(
|
||||
SourceMgr.getFileID(Module->DefinitionLoc));
|
||||
|
@ -864,14 +864,13 @@ bool ModuleMap::resolveConflicts(Module *Mod, bool Complain) {
|
|||
|
||||
Module *ModuleMap::inferModuleFromLocation(FullSourceLoc Loc) {
|
||||
if (Loc.isInvalid())
|
||||
return 0;
|
||||
|
||||
return nullptr;
|
||||
|
||||
// Use the expansion location to determine which module we're in.
|
||||
FullSourceLoc ExpansionLoc = Loc.getExpansionLoc();
|
||||
if (!ExpansionLoc.isFileID())
|
||||
return 0;
|
||||
|
||||
|
||||
return nullptr;
|
||||
|
||||
const SourceManager &SrcMgr = Loc.getManager();
|
||||
FileID ExpansionFileID = ExpansionLoc.getFileID();
|
||||
|
||||
|
@ -885,12 +884,12 @@ Module *ModuleMap::inferModuleFromLocation(FullSourceLoc Loc) {
|
|||
// any included header has an associated module.
|
||||
SourceLocation IncludeLoc = SrcMgr.getIncludeLoc(ExpansionFileID);
|
||||
if (IncludeLoc.isInvalid())
|
||||
return 0;
|
||||
|
||||
return nullptr;
|
||||
|
||||
ExpansionFileID = SrcMgr.getFileID(IncludeLoc);
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
//----------------------------------------------------------------------------//
|
||||
|
@ -936,7 +935,7 @@ namespace clang {
|
|||
Kind = EndOfFile;
|
||||
Location = 0;
|
||||
StringLength = 0;
|
||||
StringData = 0;
|
||||
StringData = nullptr;
|
||||
}
|
||||
|
||||
bool is(TokenKind K) const { return Kind == K; }
|
||||
|
@ -1036,7 +1035,7 @@ namespace clang {
|
|||
: L(L), SourceMgr(SourceMgr), Target(Target), Diags(Diags), Map(Map),
|
||||
ModuleMapFile(ModuleMapFile), Directory(Directory),
|
||||
BuiltinIncludeDir(BuiltinIncludeDir), IsSystem(IsSystem),
|
||||
HadError(false), ActiveModule(0)
|
||||
HadError(false), ActiveModule(nullptr)
|
||||
{
|
||||
Tok.clear();
|
||||
consumeToken();
|
||||
|
@ -1322,7 +1321,7 @@ void ModuleMapParser::parseModuleDecl() {
|
|||
if (Id.size() > 1) {
|
||||
// This module map defines a submodule. Go find the module of which it
|
||||
// is a submodule.
|
||||
ActiveModule = 0;
|
||||
ActiveModule = nullptr;
|
||||
for (unsigned I = 0, N = Id.size() - 1; I != N; ++I) {
|
||||
if (Module *Next = Map.lookupModuleQualified(Id[I].first, ActiveModule)) {
|
||||
ActiveModule = Next;
|
||||
|
@ -1648,8 +1647,8 @@ void ModuleMapParser::parseHeaderDecl(MMToken::TokenKind LeadingToken,
|
|||
}
|
||||
|
||||
// Look for this file.
|
||||
const FileEntry *File = 0;
|
||||
const FileEntry *BuiltinFile = 0;
|
||||
const FileEntry *File = nullptr;
|
||||
const FileEntry *BuiltinFile = nullptr;
|
||||
SmallString<128> PathName;
|
||||
if (llvm::sys::path::is_absolute(Header.FileName)) {
|
||||
PathName = Header.FileName;
|
||||
|
@ -1692,7 +1691,7 @@ void ModuleMapParser::parseHeaderDecl(MMToken::TokenKind LeadingToken,
|
|||
// up adding both (later).
|
||||
if (!File && BuiltinFile) {
|
||||
File = BuiltinFile;
|
||||
BuiltinFile = 0;
|
||||
BuiltinFile = nullptr;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1763,7 +1762,7 @@ void ModuleMapParser::parseUmbrellaDirDecl(SourceLocation UmbrellaLoc) {
|
|||
}
|
||||
|
||||
// Look for this file.
|
||||
const DirectoryEntry *Dir = 0;
|
||||
const DirectoryEntry *Dir = nullptr;
|
||||
if (llvm::sys::path::is_absolute(DirName))
|
||||
Dir = SourceMgr.getFileManager().getDirectory(DirName);
|
||||
else {
|
||||
|
@ -2080,7 +2079,7 @@ void ModuleMapParser::parseInferredModuleDecl(bool Framework, bool Explicit) {
|
|||
case MMToken::ExcludeKeyword: {
|
||||
if (ActiveModule) {
|
||||
Diags.Report(Tok.getLocation(), diag::err_mmap_expected_inferred_member)
|
||||
<< (ActiveModule != 0);
|
||||
<< (ActiveModule != nullptr);
|
||||
consumeToken();
|
||||
break;
|
||||
}
|
||||
|
@ -2100,7 +2099,7 @@ void ModuleMapParser::parseInferredModuleDecl(bool Framework, bool Explicit) {
|
|||
case MMToken::ExportKeyword:
|
||||
if (!ActiveModule) {
|
||||
Diags.Report(Tok.getLocation(), diag::err_mmap_expected_inferred_member)
|
||||
<< (ActiveModule != 0);
|
||||
<< (ActiveModule != nullptr);
|
||||
consumeToken();
|
||||
break;
|
||||
}
|
||||
|
@ -2121,7 +2120,7 @@ void ModuleMapParser::parseInferredModuleDecl(bool Framework, bool Explicit) {
|
|||
case MMToken::UmbrellaKeyword:
|
||||
default:
|
||||
Diags.Report(Tok.getLocation(), diag::err_mmap_expected_inferred_member)
|
||||
<< (ActiveModule != 0);
|
||||
<< (ActiveModule != nullptr);
|
||||
consumeToken();
|
||||
break;
|
||||
}
|
||||
|
@ -2257,7 +2256,7 @@ bool ModuleMap::parseModuleMapFile(const FileEntry *File, bool IsSystem) {
|
|||
if (Known != ParsedModuleMap.end())
|
||||
return Known->second;
|
||||
|
||||
assert(Target != 0 && "Missing target information");
|
||||
assert(Target && "Missing target information");
|
||||
auto FileCharacter = IsSystem ? SrcMgr::C_System : SrcMgr::C_User;
|
||||
FileID ID = SourceMgr.createFileID(File, SourceLocation(), FileCharacter);
|
||||
const llvm::MemoryBuffer *Buffer = SourceMgr.getBuffer(ID);
|
||||
|
|
|
@ -45,7 +45,7 @@ MacroInfo *Preprocessor::AllocateMacroInfo() {
|
|||
}
|
||||
|
||||
MIChain->Next = MIChainHead;
|
||||
MIChain->Prev = 0;
|
||||
MIChain->Prev = nullptr;
|
||||
if (MIChainHead)
|
||||
MIChainHead->Prev = MIChain;
|
||||
MIChainHead = MIChain;
|
||||
|
@ -111,7 +111,7 @@ void Preprocessor::ReleaseMacroInfo(MacroInfo *MI) {
|
|||
else {
|
||||
assert(MIChainHead == MIChain);
|
||||
MIChainHead = MIChain->Next;
|
||||
MIChainHead->Prev = 0;
|
||||
MIChainHead->Prev = nullptr;
|
||||
}
|
||||
MIChain->Next = MICache;
|
||||
MICache = MIChain;
|
||||
|
@ -154,7 +154,7 @@ void Preprocessor::ReadMacroName(Token &MacroNameTok, char isDefineUndef) {
|
|||
}
|
||||
|
||||
IdentifierInfo *II = MacroNameTok.getIdentifierInfo();
|
||||
if (II == 0) {
|
||||
if (!II) {
|
||||
bool Invalid = false;
|
||||
std::string Spelling = getSpelling(MacroNameTok, &Invalid);
|
||||
if (Invalid)
|
||||
|
@ -418,7 +418,7 @@ void Preprocessor::SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
|
|||
// looked up, etc, inside the #elif expression.
|
||||
assert(CurPPLexer->LexingRawMode && "We have to be skipping here!");
|
||||
CurPPLexer->LexingRawMode = false;
|
||||
IdentifierInfo *IfNDefMacro = 0;
|
||||
IdentifierInfo *IfNDefMacro = nullptr;
|
||||
const bool CondValue = EvaluateDirectiveExpression(IfNDefMacro);
|
||||
CurPPLexer->LexingRawMode = true;
|
||||
if (Callbacks) {
|
||||
|
@ -515,7 +515,7 @@ void Preprocessor::PTHSkipExcludedConditionalBlock() {
|
|||
continue;
|
||||
|
||||
// Evaluate the condition of the #elif.
|
||||
IdentifierInfo *IfNDefMacro = 0;
|
||||
IdentifierInfo *IfNDefMacro = nullptr;
|
||||
CurPTHLexer->ParsingPreprocessorDirective = true;
|
||||
bool ShouldEnter = EvaluateDirectiveExpression(IfNDefMacro);
|
||||
CurPTHLexer->ParsingPreprocessorDirective = false;
|
||||
|
@ -641,7 +641,7 @@ const FileEntry *Preprocessor::LookupFile(
|
|||
}
|
||||
|
||||
// Otherwise, we really couldn't find the file.
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
|
||||
|
@ -739,7 +739,7 @@ void Preprocessor::HandleDirective(Token &Result) {
|
|||
return HandleDigitDirective(Result);
|
||||
default:
|
||||
IdentifierInfo *II = Result.getIdentifierInfo();
|
||||
if (II == 0) break; // Not an identifier.
|
||||
if (!II) break; // Not an identifier.
|
||||
|
||||
// Ask what the preprocessor keyword ID is.
|
||||
switch (II->getPPKeywordID()) {
|
||||
|
@ -1204,7 +1204,7 @@ void Preprocessor::HandleMacroPublicDirective(Token &Tok) {
|
|||
MacroDirective *MD = getMacroDirective(II);
|
||||
|
||||
// If the macro is not defined, this is an error.
|
||||
if (MD == 0) {
|
||||
if (!MD) {
|
||||
Diag(MacroNameTok, diag::err_pp_visibility_non_macro) << II;
|
||||
return;
|
||||
}
|
||||
|
@ -1231,7 +1231,7 @@ void Preprocessor::HandleMacroPrivateDirective(Token &Tok) {
|
|||
MacroDirective *MD = getMacroDirective(II);
|
||||
|
||||
// If the macro is not defined, this is an error.
|
||||
if (MD == 0) {
|
||||
if (!MD) {
|
||||
Diag(MacroNameTok, diag::err_pp_visibility_non_macro) << II;
|
||||
return;
|
||||
}
|
||||
|
@ -1467,9 +1467,9 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
|
|||
}
|
||||
const FileEntry *File = LookupFile(
|
||||
FilenameLoc, LangOpts.MSVCCompat ? NormalizedPath.c_str() : Filename,
|
||||
isAngled, LookupFrom, CurDir, Callbacks ? &SearchPath : NULL,
|
||||
Callbacks ? &RelativePath : NULL,
|
||||
HeaderInfo.getHeaderSearchOpts().ModuleMaps ? &SuggestedModule : 0);
|
||||
isAngled, LookupFrom, CurDir, Callbacks ? &SearchPath : nullptr,
|
||||
Callbacks ? &RelativePath : nullptr,
|
||||
HeaderInfo.getHeaderSearchOpts().ModuleMaps ? &SuggestedModule : nullptr);
|
||||
|
||||
if (Callbacks) {
|
||||
if (!File) {
|
||||
|
@ -1485,10 +1485,10 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
|
|||
File = LookupFile(FilenameLoc,
|
||||
LangOpts.MSVCCompat ? NormalizedPath.c_str()
|
||||
: Filename,
|
||||
isAngled, LookupFrom, CurDir, 0, 0,
|
||||
isAngled, LookupFrom, CurDir, nullptr, nullptr,
|
||||
HeaderInfo.getHeaderSearchOpts().ModuleMaps
|
||||
? &SuggestedModule
|
||||
: 0,
|
||||
: nullptr,
|
||||
/*SkipCache*/ true);
|
||||
}
|
||||
}
|
||||
|
@ -1500,11 +1500,11 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
|
|||
LangOpts.MSVCCompat ? NormalizedPath.c_str()
|
||||
: Filename,
|
||||
isAngled, FilenameRange, File, SearchPath,
|
||||
RelativePath, /*ImportedModule=*/0);
|
||||
RelativePath, /*ImportedModule=*/nullptr);
|
||||
}
|
||||
}
|
||||
|
||||
if (File == 0) {
|
||||
|
||||
if (!File) {
|
||||
if (!SuppressIncludeNotFoundError) {
|
||||
// If the file could not be located and it was included via angle
|
||||
// brackets, we can attempt a lookup as though it were a quoted path to
|
||||
|
@ -1512,9 +1512,10 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
|
|||
if (isAngled) {
|
||||
File = LookupFile(
|
||||
FilenameLoc, LangOpts.MSVCCompat ? NormalizedPath.c_str() : Filename,
|
||||
false, LookupFrom, CurDir, Callbacks ? &SearchPath : 0,
|
||||
Callbacks ? &RelativePath : 0,
|
||||
HeaderInfo.getHeaderSearchOpts().ModuleMaps ? &SuggestedModule : 0);
|
||||
false, LookupFrom, CurDir, Callbacks ? &SearchPath : nullptr,
|
||||
Callbacks ? &RelativePath : nullptr,
|
||||
HeaderInfo.getHeaderSearchOpts().ModuleMaps ? &SuggestedModule
|
||||
: nullptr);
|
||||
if (File) {
|
||||
SourceRange Range(FilenameTok.getLocation(), CharEnd);
|
||||
Diag(FilenameTok, diag::err_pp_file_not_found_not_fatal) <<
|
||||
|
@ -1595,7 +1596,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
|
|||
ModuleLoadResult Imported
|
||||
= TheModuleLoader.loadModule(IncludeTok.getLocation(), Path, Visibility,
|
||||
/*IsIncludeDirective=*/true);
|
||||
assert((Imported == 0 || Imported == SuggestedModule.getModule()) &&
|
||||
assert((Imported == nullptr || Imported == SuggestedModule.getModule()) &&
|
||||
"the imported module is different than the suggested one");
|
||||
|
||||
if (!Imported && hadModuleLoaderFatalFailure()) {
|
||||
|
@ -1646,7 +1647,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
|
|||
Callbacks->InclusionDirective(HashLoc, IncludeTok, Filename, isAngled,
|
||||
FilenameRange, File,
|
||||
SearchPath, RelativePath,
|
||||
/*ImportedModule=*/0);
|
||||
/*ImportedModule=*/nullptr);
|
||||
}
|
||||
|
||||
// The #included file will be considered to be a system header if either it is
|
||||
|
@ -1707,9 +1708,9 @@ void Preprocessor::HandleIncludeNextDirective(SourceLocation HashLoc,
|
|||
// diagnostic.
|
||||
const DirectoryLookup *Lookup = CurDirLookup;
|
||||
if (isInPrimaryFile()) {
|
||||
Lookup = 0;
|
||||
Lookup = nullptr;
|
||||
Diag(IncludeNextTok, diag::pp_include_next_in_primary);
|
||||
} else if (Lookup == 0) {
|
||||
} else if (!Lookup) {
|
||||
Diag(IncludeNextTok, diag::pp_include_next_absolute_path);
|
||||
} else {
|
||||
// Start looking up in the next directory.
|
||||
|
@ -1742,7 +1743,7 @@ void Preprocessor::HandleImportDirective(SourceLocation HashLoc,
|
|||
return HandleMicrosoftImportDirective(ImportTok);
|
||||
Diag(ImportTok, diag::ext_pp_import_directive);
|
||||
}
|
||||
return HandleIncludeDirective(HashLoc, ImportTok, 0, true);
|
||||
return HandleIncludeDirective(HashLoc, ImportTok, nullptr, true);
|
||||
}
|
||||
|
||||
/// HandleIncludeMacrosDirective - The -imacros command line option turns into a
|
||||
|
@ -1763,7 +1764,7 @@ void Preprocessor::HandleIncludeMacrosDirective(SourceLocation HashLoc,
|
|||
|
||||
// Treat this as a normal #include for checking purposes. If this is
|
||||
// successful, it will push a new lexer onto the include stack.
|
||||
HandleIncludeDirective(HashLoc, IncludeMacrosTok, 0, false);
|
||||
HandleIncludeDirective(HashLoc, IncludeMacrosTok, nullptr, false);
|
||||
|
||||
Token TmpTok;
|
||||
do {
|
||||
|
@ -1823,7 +1824,7 @@ bool Preprocessor::ReadMacroDefinitionArgList(MacroInfo *MI, Token &Tok) {
|
|||
// Handle keywords and identifiers here to accept things like
|
||||
// #define Foo(for) for.
|
||||
IdentifierInfo *II = Tok.getIdentifierInfo();
|
||||
if (II == 0) {
|
||||
if (!II) {
|
||||
// #define X(1
|
||||
Diag(Tok, diag::err_pp_invalid_tok_in_arg_list);
|
||||
return true;
|
||||
|
@ -2027,7 +2028,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok,
|
|||
LexUnexpandedToken(Tok);
|
||||
|
||||
// Check for a valid macro arg identifier.
|
||||
if (Tok.getIdentifierInfo() == 0 ||
|
||||
if (Tok.getIdentifierInfo() == nullptr ||
|
||||
MI->getArgumentNum(Tok.getIdentifierInfo()) == -1) {
|
||||
|
||||
// If this is assembler-with-cpp mode, we accept random gibberish after
|
||||
|
@ -2143,7 +2144,7 @@ void Preprocessor::HandleUndefDirective(Token &UndefTok) {
|
|||
|
||||
// Okay, we finally have a valid identifier to undef.
|
||||
MacroDirective *MD = getMacroDirective(MacroNameTok.getIdentifierInfo());
|
||||
const MacroInfo *MI = MD ? MD->getMacroInfo() : 0;
|
||||
const MacroInfo *MI = MD ? MD->getMacroInfo() : nullptr;
|
||||
|
||||
// If the callbacks want to know, tell them about the macro #undef.
|
||||
// Note: no matter if the macro was defined or not.
|
||||
|
@ -2151,7 +2152,8 @@ void Preprocessor::HandleUndefDirective(Token &UndefTok) {
|
|||
Callbacks->MacroUndefined(MacroNameTok, MD);
|
||||
|
||||
// If the macro is not defined, this is a noop undef, just return.
|
||||
if (MI == 0) return;
|
||||
if (!MI)
|
||||
return;
|
||||
|
||||
if (!MI->isUsed() && MI->isWarnIfUnused())
|
||||
Diag(MI->getDefinitionLoc(), diag::pp_macro_not_used);
|
||||
|
@ -2195,14 +2197,14 @@ void Preprocessor::HandleIfdefDirective(Token &Result, bool isIfndef,
|
|||
|
||||
IdentifierInfo *MII = MacroNameTok.getIdentifierInfo();
|
||||
MacroDirective *MD = getMacroDirective(MII);
|
||||
MacroInfo *MI = MD ? MD->getMacroInfo() : 0;
|
||||
MacroInfo *MI = MD ? MD->getMacroInfo() : nullptr;
|
||||
|
||||
if (CurPPLexer->getConditionalStackDepth() == 0) {
|
||||
// If the start of a top-level #ifdef and if the macro is not defined,
|
||||
// inform MIOpt that this might be the start of a proper include guard.
|
||||
// Otherwise it is some other form of unknown conditional which we can't
|
||||
// handle.
|
||||
if (!ReadAnyTokensBeforeDirective && MI == 0) {
|
||||
if (!ReadAnyTokensBeforeDirective && !MI) {
|
||||
assert(isIfndef && "#ifdef shouldn't reach here");
|
||||
CurPPLexer->MIOpt.EnterTopLevelIfndef(MII, MacroNameTok.getLocation());
|
||||
} else
|
||||
|
@ -2241,7 +2243,7 @@ void Preprocessor::HandleIfDirective(Token &IfToken,
|
|||
++NumIf;
|
||||
|
||||
// Parse and evaluate the conditional expression.
|
||||
IdentifierInfo *IfNDefMacro = 0;
|
||||
IdentifierInfo *IfNDefMacro = nullptr;
|
||||
const SourceLocation ConditionalBegin = CurPPLexer->getSourceLocation();
|
||||
const bool ConditionalTrue = EvaluateDirectiveExpression(IfNDefMacro);
|
||||
const SourceLocation ConditionalEnd = CurPPLexer->getSourceLocation();
|
||||
|
|
|
@ -104,7 +104,7 @@ static bool EvaluateDefined(PPValue &Result, Token &PeekTok, DefinedTracker &DT,
|
|||
}
|
||||
|
||||
// If we don't have a pp-identifier now, this is an error.
|
||||
if ((II = PeekTok.getIdentifierInfo()) == 0) {
|
||||
if ((II = PeekTok.getIdentifierInfo()) == nullptr) {
|
||||
PP.Diag(PeekTok, diag::err_pp_defined_requires_identifier);
|
||||
return true;
|
||||
}
|
||||
|
@ -113,7 +113,7 @@ static bool EvaluateDefined(PPValue &Result, Token &PeekTok, DefinedTracker &DT,
|
|||
Result.Val = II->hasMacroDefinition();
|
||||
Result.Val.setIsUnsigned(false); // Result is signed intmax_t.
|
||||
|
||||
MacroDirective *Macro = 0;
|
||||
MacroDirective *Macro = nullptr;
|
||||
// If there is a macro, mark it used.
|
||||
if (Result.Val != 0 && ValueLive) {
|
||||
Macro = PP.getMacroDirective(II);
|
||||
|
|
|
@ -58,7 +58,7 @@ PreprocessorLexer *Preprocessor::getCurrentFileLexer() const {
|
|||
if (IsFileLexer(ISI))
|
||||
return ISI.ThePPLexer;
|
||||
}
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
|
||||
|
@ -117,7 +117,7 @@ void Preprocessor::EnterSourceFileWithLexer(Lexer *TheLexer,
|
|||
CurLexer.reset(TheLexer);
|
||||
CurPPLexer = TheLexer;
|
||||
CurDirLookup = CurDir;
|
||||
CurSubmodule = 0;
|
||||
CurSubmodule = nullptr;
|
||||
if (CurLexerKind != CLK_LexAfterModuleImport)
|
||||
CurLexerKind = CLK_Lexer;
|
||||
|
||||
|
@ -142,7 +142,7 @@ void Preprocessor::EnterSourceFileWithPTH(PTHLexer *PL,
|
|||
CurDirLookup = CurDir;
|
||||
CurPTHLexer.reset(PL);
|
||||
CurPPLexer = CurPTHLexer.get();
|
||||
CurSubmodule = 0;
|
||||
CurSubmodule = nullptr;
|
||||
if (CurLexerKind != CLK_LexAfterModuleImport)
|
||||
CurLexerKind = CLK_PTHLexer;
|
||||
|
||||
|
@ -169,7 +169,7 @@ void Preprocessor::EnterMacro(Token &Tok, SourceLocation ILEnd,
|
|||
}
|
||||
|
||||
PushIncludeMacroStack();
|
||||
CurDirLookup = 0;
|
||||
CurDirLookup = nullptr;
|
||||
CurTokenLexer.reset(TokLexer);
|
||||
if (CurLexerKind != CLK_LexAfterModuleImport)
|
||||
CurLexerKind = CLK_TokenLexer;
|
||||
|
@ -202,7 +202,7 @@ void Preprocessor::EnterTokenStream(const Token *Toks, unsigned NumToks,
|
|||
|
||||
// Save our current state.
|
||||
PushIncludeMacroStack();
|
||||
CurDirLookup = 0;
|
||||
CurDirLookup = nullptr;
|
||||
CurTokenLexer.reset(TokLexer);
|
||||
if (CurLexerKind != CLK_LexAfterModuleImport)
|
||||
CurLexerKind = CLK_TokenLexer;
|
||||
|
@ -354,7 +354,7 @@ bool Preprocessor::HandleEndOfFile(Token &Result, bool isEndOfMacro) {
|
|||
CurPTHLexer.reset();
|
||||
}
|
||||
|
||||
CurPPLexer = 0;
|
||||
CurPPLexer = nullptr;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -429,7 +429,7 @@ bool Preprocessor::HandleEndOfFile(Token &Result, bool isEndOfMacro) {
|
|||
}
|
||||
|
||||
if (!isIncrementalProcessingEnabled())
|
||||
CurPPLexer = 0;
|
||||
CurPPLexer = nullptr;
|
||||
|
||||
if (TUKind == TU_Complete) {
|
||||
// This is the end of the top-level file. 'WarnUnusedMacroLocs' has
|
||||
|
@ -561,11 +561,11 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
|
|||
// We handle this by scanning for the closest real lexer, switching it to
|
||||
// raw mode and preprocessor mode. This will cause it to return \n as an
|
||||
// explicit EOD token.
|
||||
PreprocessorLexer *FoundLexer = 0;
|
||||
PreprocessorLexer *FoundLexer = nullptr;
|
||||
bool LexerWasInPPMode = false;
|
||||
for (unsigned i = 0, e = IncludeMacroStack.size(); i != e; ++i) {
|
||||
IncludeStackInfo &ISI = *(IncludeMacroStack.end()-i-1);
|
||||
if (ISI.ThePPLexer == 0) continue; // Scan for a real lexer.
|
||||
if (ISI.ThePPLexer == nullptr) continue; // Scan for a real lexer.
|
||||
|
||||
// Once we find a real lexer, mark it as raw mode (disabling macro
|
||||
// expansions) and preprocessor mode (return EOD). We know that the lexer
|
||||
|
|
|
@ -103,8 +103,8 @@ void Preprocessor::RegisterBuiltinMacros() {
|
|||
Ident__identifier = RegisterBuiltinMacro(*this, "__identifier");
|
||||
Ident__pragma = RegisterBuiltinMacro(*this, "__pragma");
|
||||
} else {
|
||||
Ident__identifier = 0;
|
||||
Ident__pragma = 0;
|
||||
Ident__identifier = nullptr;
|
||||
Ident__pragma = nullptr;
|
||||
}
|
||||
|
||||
// Clang Extensions.
|
||||
|
@ -125,10 +125,10 @@ void Preprocessor::RegisterBuiltinMacros() {
|
|||
if (!LangOpts.CurrentModule.empty())
|
||||
Ident__MODULE__ = RegisterBuiltinMacro(*this, "__MODULE__");
|
||||
else
|
||||
Ident__MODULE__ = 0;
|
||||
Ident__MODULE__ = nullptr;
|
||||
} else {
|
||||
Ident__building_module = 0;
|
||||
Ident__MODULE__ = 0;
|
||||
Ident__building_module = nullptr;
|
||||
Ident__MODULE__ = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -140,7 +140,7 @@ static bool isTrivialSingleTokenExpansion(const MacroInfo *MI,
|
|||
IdentifierInfo *II = MI->getReplacementToken(0).getIdentifierInfo();
|
||||
|
||||
// If the token isn't an identifier, it's always literally expanded.
|
||||
if (II == 0) return true;
|
||||
if (!II) return true;
|
||||
|
||||
// If the information about this identifier is out of date, update it from
|
||||
// the external source.
|
||||
|
@ -228,7 +228,8 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier,
|
|||
// If this is a builtin macro, like __LINE__ or _Pragma, handle it specially.
|
||||
if (MI->isBuiltinMacro()) {
|
||||
if (Callbacks) Callbacks->MacroExpands(Identifier, MD,
|
||||
Identifier.getLocation(),/*Args=*/0);
|
||||
Identifier.getLocation(),
|
||||
/*Args=*/nullptr);
|
||||
ExpandBuiltinMacro(Identifier);
|
||||
return true;
|
||||
}
|
||||
|
@ -236,7 +237,7 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier,
|
|||
/// Args - If this is a function-like macro expansion, this contains,
|
||||
/// for each macro argument, the list of tokens that were provided to the
|
||||
/// invocation.
|
||||
MacroArgs *Args = 0;
|
||||
MacroArgs *Args = nullptr;
|
||||
|
||||
// Remember where the end of the expansion occurred. For an object-like
|
||||
// macro, this is the identifier. For a function-like macro, this is the ')'.
|
||||
|
@ -254,7 +255,7 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier,
|
|||
InMacroArgs = false;
|
||||
|
||||
// If there was an error parsing the arguments, bail out.
|
||||
if (Args == 0) return true;
|
||||
if (!Args) return true;
|
||||
|
||||
++NumFnMacroExpanded;
|
||||
} else {
|
||||
|
@ -282,7 +283,8 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier,
|
|||
for (unsigned i=0, e = DelayedMacroExpandsCallbacks.size(); i!=e; ++i) {
|
||||
MacroExpandsInfo &Info = DelayedMacroExpandsCallbacks[i];
|
||||
// FIXME: We lose macro args info with delayed callback.
|
||||
Callbacks->MacroExpands(Info.Tok, Info.MD, Info.Range, /*Args=*/0);
|
||||
Callbacks->MacroExpands(Info.Tok, Info.MD, Info.Range,
|
||||
/*Args=*/nullptr);
|
||||
}
|
||||
DelayedMacroExpandsCallbacks.clear();
|
||||
}
|
||||
|
@ -557,7 +559,7 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
|
|||
<< MacroName.getIdentifierInfo();
|
||||
// Do not lose the EOF/EOD. Return it to the client.
|
||||
MacroName = Tok;
|
||||
return 0;
|
||||
return nullptr;
|
||||
} else {
|
||||
// Do not lose the EOF/EOD.
|
||||
Token *Toks = new Token[1];
|
||||
|
@ -589,7 +591,7 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
|
|||
// If this is a comment token in the argument list and we're just in
|
||||
// -C mode (not -CC mode), discard the comment.
|
||||
continue;
|
||||
} else if (Tok.getIdentifierInfo() != 0) {
|
||||
} else if (Tok.getIdentifierInfo() != nullptr) {
|
||||
// Reading macro arguments can cause macros that we are currently
|
||||
// expanding from to be popped off the expansion stack. Doing so causes
|
||||
// them to be reenabled for expansion. Here we record whether any
|
||||
|
@ -681,10 +683,10 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
|
|||
DB << *Range;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
if (FixedNumArgs != MinArgsExpected)
|
||||
return 0;
|
||||
return nullptr;
|
||||
|
||||
DiagnosticBuilder DB = Diag(MacroName, diag::note_suggest_parens_for_macro);
|
||||
for (SmallVector<SourceRange, 4>::iterator
|
||||
|
@ -751,7 +753,7 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
|
|||
Diag(Tok, diag::err_too_few_args_in_macro_invoc);
|
||||
Diag(MI->getDefinitionLoc(), diag::note_macro_here)
|
||||
<< MacroName.getIdentifierInfo();
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Add a marker EOF token to the end of the token list for this argument.
|
||||
|
@ -773,7 +775,7 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
|
|||
Diag(MacroName, diag::err_too_many_args_in_macro_invoc);
|
||||
Diag(MI->getDefinitionLoc(), diag::note_macro_here)
|
||||
<< MacroName.getIdentifierInfo();
|
||||
return 0;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
return MacroArgs::create(MI, ArgTokens, isVarargsElided, *this);
|
||||
|
@ -788,7 +790,7 @@ Token *Preprocessor::cacheMacroExpandedTokens(TokenLexer *tokLexer,
|
|||
ArrayRef<Token> tokens) {
|
||||
assert(tokLexer);
|
||||
if (tokens.empty())
|
||||
return 0;
|
||||
return nullptr;
|
||||
|
||||
size_t newIndex = MacroExpandedTokens.size();
|
||||
bool cacheNeedsToGrow = tokens.size() >
|
||||
|
@ -824,7 +826,7 @@ void Preprocessor::removeCachedMacroExpandedTokensOfLastLexer() {
|
|||
/// the identifier tokens inserted.
|
||||
static void ComputeDATE_TIME(SourceLocation &DATELoc, SourceLocation &TIMELoc,
|
||||
Preprocessor &PP) {
|
||||
time_t TT = time(0);
|
||||
time_t TT = time(nullptr);
|
||||
struct tm *TM = localtime(&TT);
|
||||
|
||||
static const char * const Months[] = {
|
||||
|
@ -1154,18 +1156,18 @@ static bool EvaluateHasIncludeCommon(Token &Tok,
|
|||
// Search include directories.
|
||||
const DirectoryLookup *CurDir;
|
||||
const FileEntry *File =
|
||||
PP.LookupFile(FilenameLoc, Filename, isAngled, LookupFrom, CurDir, NULL,
|
||||
NULL, NULL);
|
||||
PP.LookupFile(FilenameLoc, Filename, isAngled, LookupFrom, CurDir,
|
||||
nullptr, nullptr, nullptr);
|
||||
|
||||
// Get the result value. A result of true means the file exists.
|
||||
return File != 0;
|
||||
return File != nullptr;
|
||||
}
|
||||
|
||||
/// EvaluateHasInclude - Process a '__has_include("path")' expression.
|
||||
/// Returns true if successful.
|
||||
static bool EvaluateHasInclude(Token &Tok, IdentifierInfo *II,
|
||||
Preprocessor &PP) {
|
||||
return EvaluateHasIncludeCommon(Tok, II, PP, NULL);
|
||||
return EvaluateHasIncludeCommon(Tok, II, PP, nullptr);
|
||||
}
|
||||
|
||||
/// EvaluateHasIncludeNext - Process '__has_include_next("path")' expression.
|
||||
|
@ -1177,9 +1179,9 @@ static bool EvaluateHasIncludeNext(Token &Tok,
|
|||
// issue a diagnostic.
|
||||
const DirectoryLookup *Lookup = PP.GetCurDirLookup();
|
||||
if (PP.isInPrimaryFile()) {
|
||||
Lookup = 0;
|
||||
Lookup = nullptr;
|
||||
PP.Diag(Tok, diag::pp_include_next_in_primary);
|
||||
} else if (Lookup == 0) {
|
||||
} else if (!Lookup) {
|
||||
PP.Diag(Tok, diag::pp_include_next_absolute_path);
|
||||
} else {
|
||||
// Start looking up in the next directory.
|
||||
|
@ -1252,7 +1254,7 @@ void Preprocessor::ExpandBuiltinMacro(Token &Tok) {
|
|||
llvm::raw_svector_ostream OS(TmpBuffer);
|
||||
|
||||
// Set up the return result.
|
||||
Tok.setIdentifierInfo(0);
|
||||
Tok.setIdentifierInfo(nullptr);
|
||||
Tok.clearFlag(Token::NeedsCleaning);
|
||||
|
||||
if (II == Ident__LINE__) {
|
||||
|
@ -1341,7 +1343,7 @@ void Preprocessor::ExpandBuiltinMacro(Token &Tok) {
|
|||
|
||||
// Get the file that we are lexing out of. If we're currently lexing from
|
||||
// a macro, dig into the include stack.
|
||||
const FileEntry *CurFile = 0;
|
||||
const FileEntry *CurFile = nullptr;
|
||||
PreprocessorLexer *TheLexer = getCurrentFileLexer();
|
||||
|
||||
if (TheLexer)
|
||||
|
@ -1371,7 +1373,7 @@ void Preprocessor::ExpandBuiltinMacro(Token &Tok) {
|
|||
SourceLocation StartLoc = Tok.getLocation();
|
||||
|
||||
bool IsValid = false;
|
||||
IdentifierInfo *FeatureII = 0;
|
||||
IdentifierInfo *FeatureII = nullptr;
|
||||
|
||||
// Read the '('.
|
||||
LexUnexpandedToken(Tok);
|
||||
|
|
|
@ -37,7 +37,7 @@ using namespace clang;
|
|||
|
||||
PTHLexer::PTHLexer(Preprocessor &PP, FileID FID, const unsigned char *D,
|
||||
const unsigned char *ppcond, PTHManager &PM)
|
||||
: PreprocessorLexer(&PP, FID), TokBuf(D), CurPtr(D), LastHashTokPtr(0),
|
||||
: PreprocessorLexer(&PP, FID), TokBuf(D), CurPtr(D), LastHashTokPtr(nullptr),
|
||||
PPCond(ppcond), CurPPCondPtr(ppcond), PTHMgr(PM) {
|
||||
|
||||
FileStartLoc = PP.getSourceManager().getLocForStartOfFile(FID);
|
||||
|
@ -191,7 +191,7 @@ bool PTHLexer::SkipBlock() {
|
|||
assert(CurPPCondPtr && "No cached PP conditional information.");
|
||||
assert(LastHashTokPtr && "No known '#' token.");
|
||||
|
||||
const unsigned char* HashEntryI = 0;
|
||||
const unsigned char *HashEntryI = nullptr;
|
||||
uint32_t TableIdx;
|
||||
|
||||
do {
|
||||
|
@ -423,7 +423,7 @@ PTHManager::PTHManager(const llvm::MemoryBuffer* buf, void* fileLookup,
|
|||
const char* originalSourceFile)
|
||||
: Buf(buf), PerIDCache(perIDCache), FileLookup(fileLookup),
|
||||
IdDataTable(idDataTable), StringIdLookup(stringIdLookup),
|
||||
NumIds(numIds), PP(0), SpellingBase(spellingBase),
|
||||
NumIds(numIds), PP(nullptr), SpellingBase(spellingBase),
|
||||
OriginalSourceFile(originalSourceFile) {}
|
||||
|
||||
PTHManager::~PTHManager() {
|
||||
|
@@ -445,7 +445,7 @@ PTHManager *PTHManager::Create(const std::string &file,
if (llvm::MemoryBuffer::getFile(file, File)) {
// FIXME: Add ec.message() to this diag.
Diags.Report(diag::err_invalid_pth_file) << file;
return 0;
return nullptr;
}

using namespace llvm::support;
@@ -459,7 +459,7 @@ PTHManager *PTHManager::Create(const std::string &file,
if ((BufEnd - BufBeg) < (signed)(sizeof("cfe-pth") + 4 + 4) ||
memcmp(BufBeg, "cfe-pth", sizeof("cfe-pth")) != 0) {
Diags.Report(diag::err_invalid_pth_file) << file;
return 0;
return nullptr;
}

// Read the PTH version.
@@ -471,7 +471,7 @@ PTHManager *PTHManager::Create(const std::string &file,
Version < PTHManager::Version
? "PTH file uses an older PTH format that is no longer supported"
: "PTH file uses a newer PTH format that cannot be read");
return 0;
return nullptr;
}

// Compute the address of the index table at the end of the PTH file.
@@ -479,7 +479,7 @@ PTHManager *PTHManager::Create(const std::string &file,

if (PrologueOffset >= BufEnd) {
Diags.Report(diag::err_invalid_pth_file) << file;
return 0;
return nullptr;
}

// Construct the file lookup table. This will be used for mapping from
@@ -490,7 +490,7 @@ PTHManager *PTHManager::Create(const std::string &file,

if (!(FileTable > BufBeg && FileTable < BufEnd)) {
Diags.Report(diag::err_invalid_pth_file) << file;
return 0; // FIXME: Proper error diagnostic?
return nullptr; // FIXME: Proper error diagnostic?
}

std::unique_ptr<PTHFileLookup> FL(PTHFileLookup::Create(FileTable, BufBeg));
@@ -508,7 +508,7 @@ PTHManager *PTHManager::Create(const std::string &file,

if (!(IData >= BufBeg && IData < BufEnd)) {
Diags.Report(diag::err_invalid_pth_file) << file;
return 0;
return nullptr;
}

// Get the location of the hashtable mapping between strings and
@@ -518,7 +518,7 @@ PTHManager *PTHManager::Create(const std::string &file,
BufBeg + endian::readNext<uint32_t, little, aligned>(StringIdTableOffset);
if (!(StringIdTable >= BufBeg && StringIdTable < BufEnd)) {
Diags.Report(diag::err_invalid_pth_file) << file;
return 0;
return nullptr;
}

std::unique_ptr<PTHStringIdLookup> SL(
@@ -530,7 +530,7 @@ PTHManager *PTHManager::Create(const std::string &file,
BufBeg + endian::readNext<uint32_t, little, aligned>(spellingBaseOffset);
if (!(spellingBase >= BufBeg && spellingBase < BufEnd)) {
Diags.Report(diag::err_invalid_pth_file) << file;
return 0;
return nullptr;
}

// Get the number of IdentifierInfos and pre-allocate the identifier cache.
@@ -539,13 +539,13 @@ PTHManager *PTHManager::Create(const std::string &file,
// Pre-allocate the persistent ID -> IdentifierInfo* cache. We use calloc()
// so that we in the best case only zero out memory once when the OS returns
// us new pages.
IdentifierInfo** PerIDCache = 0;
IdentifierInfo **PerIDCache = nullptr;

if (NumIds) {
PerIDCache = (IdentifierInfo**)calloc(NumIds, sizeof(*PerIDCache));
if (!PerIDCache) {
InvalidPTH(Diags, "Could not allocate memory for processing PTH file");
return 0;
return nullptr;
}
}

@@ -553,7 +553,7 @@ PTHManager *PTHManager::Create(const std::string &file,
const unsigned char* originalSourceBase = PrologueOffset + sizeof(uint32_t)*4;
unsigned len =
endian::readNext<uint16_t, little, unaligned>(originalSourceBase);
if (!len) originalSourceBase = 0;
if (!len) originalSourceBase = nullptr;

// Create the new PTHManager.
return new PTHManager(File.release(), FL.release(), IData, PerIDCache,
@@ -591,7 +591,7 @@ IdentifierInfo* PTHManager::get(StringRef Name) {
PTHStringIdLookup::iterator I = SL.find(std::make_pair(Name.data(),
Name.size()));
if (I == SL.end()) // No identifier found?
return 0;
return nullptr;

// Match found. Return the identifier!
assert(*I > 0);
@@ -601,7 +601,7 @@ IdentifierInfo* PTHManager::get(StringRef Name) {
PTHLexer *PTHManager::CreateLexer(FileID FID) {
const FileEntry *FE = PP->getSourceManager().getFileEntryForID(FID);
if (!FE)
return 0;
return nullptr;

using namespace llvm::support;

@@ -612,7 +612,7 @@ PTHLexer *PTHManager::CreateLexer(FileID FID) {
PTHFileLookup::iterator I = PFL.find(FE);

if (I == PFL.end()) // No tokens available?
return 0;
return nullptr;

const PTHFileData& FileData = *I;

@@ -623,7 +623,7 @@ PTHLexer *PTHManager::CreateLexer(FileID FID) {
// Get the location of pp-conditional table.
const unsigned char* ppcond = BufStart + FileData.getPPCondOffset();
uint32_t Len = endian::readNext<uint32_t, little, aligned>(ppcond);
if (Len == 0) ppcond = 0;
if (Len == 0) ppcond = nullptr;

assert(PP && "No preprocessor set yet!");
return new PTHLexer(*PP, FID, data, ppcond, *this);
@@ -59,7 +59,7 @@ PragmaHandler *PragmaNamespace::FindHandler(StringRef Name,
bool IgnoreNull) const {
if (PragmaHandler *Handler = Handlers.lookup(Name))
return Handler;
return IgnoreNull ? 0 : Handlers.lookup(StringRef());
return IgnoreNull ? nullptr : Handlers.lookup(StringRef());
}

void PragmaNamespace::AddPragma(PragmaHandler *Handler) {
@@ -88,7 +88,7 @@ void PragmaNamespace::HandlePragma(Preprocessor &PP,
= FindHandler(Tok.getIdentifierInfo() ? Tok.getIdentifierInfo()->getName()
: StringRef(),
/*IgnoreNull=*/false);
if (Handler == 0) {
if (!Handler) {
PP.Diag(Tok, diag::warn_pragma_ignored);
return;
}
@@ -290,7 +290,7 @@ void Preprocessor::Handle_Pragma(Token &Tok) {
Lexer *TL = Lexer::Create_PragmaLexer(TokLoc, PragmaLoc, RParenLoc,
StrVal.size(), *this);

EnterSourceFileWithLexer(TL, 0);
EnterSourceFileWithLexer(TL, nullptr);

// With everything set up, lex this as a #pragma directive.
HandlePragmaDirective(PragmaLoc, PIK__Pragma);
@@ -473,8 +473,9 @@ void Preprocessor::HandlePragmaDependency(Token &DependencyTok) {
// Search include directories for this file.
const DirectoryLookup *CurDir;
const FileEntry *File = LookupFile(FilenameTok.getLocation(), Filename,
isAngled, 0, CurDir, NULL, NULL, NULL);
if (File == 0) {
isAngled, nullptr, CurDir, nullptr,
nullptr, nullptr);
if (!File) {
if (!SuppressIncludeNotFoundError)
Diag(FilenameTok, diag::err_pp_file_not_found) << Filename;
return;
@@ -510,7 +511,7 @@ IdentifierInfo *Preprocessor::ParsePragmaPushOrPopMacro(Token &Tok) {
if (Tok.isNot(tok::l_paren)) {
Diag(PragmaTok.getLocation(), diag::err_pragma_push_pop_macro_malformed)
<< getSpelling(PragmaTok);
return 0;
return nullptr;
}

// Read the macro name string.
@@ -518,12 +519,12 @@ IdentifierInfo *Preprocessor::ParsePragmaPushOrPopMacro(Token &Tok) {
if (Tok.isNot(tok::string_literal)) {
Diag(PragmaTok.getLocation(), diag::err_pragma_push_pop_macro_malformed)
<< getSpelling(PragmaTok);
return 0;
return nullptr;
}

if (Tok.hasUDSuffix()) {
Diag(Tok, diag::err_invalid_string_udl);
return 0;
return nullptr;
}

// Remember the macro string.
@@ -534,7 +535,7 @@ IdentifierInfo *Preprocessor::ParsePragmaPushOrPopMacro(Token &Tok) {
if (Tok.isNot(tok::r_paren)) {
Diag(PragmaTok.getLocation(), diag::err_pragma_push_pop_macro_malformed)
<< getSpelling(PragmaTok);
return 0;
return nullptr;
}

assert(StrVal[0] == '"' && StrVal[StrVal.size()-1] == '"' &&
@@ -736,7 +737,7 @@ void Preprocessor::AddPragmaHandler(StringRef Namespace,
// we already have the namespace to insert into.
if (PragmaHandler *Existing = PragmaHandlers->FindHandler(Namespace)) {
InsertNS = Existing->getIfNamespace();
assert(InsertNS != 0 && "Cannot have a pragma namespace and pragma"
assert(InsertNS != nullptr && "Cannot have a pragma namespace and pragma"
" handler with the same name!");
} else {
// Otherwise, this namespace doesn't exist yet, create and insert the
@@ -40,7 +40,7 @@ InclusionDirective::InclusionDirective(PreprocessingRecord &PPRec,

PreprocessingRecord::PreprocessingRecord(SourceManager &SM)
: SourceMgr(SM),
ExternalSource(0) {
ExternalSource(nullptr) {
}

/// \brief Returns a pair of [Begin, End) iterators of preprocessed entities
@@ -334,7 +334,7 @@ PreprocessedEntity *PreprocessingRecord::getPreprocessedEntity(PPEntityID PPID){
}

if (PPID.ID == 0)
return 0;
return nullptr;
unsigned Index = PPID.ID - 1;
assert(Index < PreprocessedEntities.size() &&
"Out-of bounds local preprocessed entity");
@@ -361,7 +361,7 @@ MacroDefinition *PreprocessingRecord::findMacroDefinition(const MacroInfo *MI) {
llvm::DenseMap<const MacroInfo *, MacroDefinition *>::iterator Pos
= MacroDefinitions.find(MI);
if (Pos == MacroDefinitions.end())
return 0;
return nullptr;

return Pos->second;
}
@@ -62,14 +62,15 @@ Preprocessor::Preprocessor(IntrusiveRefCntPtr<PreprocessorOptions> PPOpts,
TranslationUnitKind TUKind)
: PPOpts(PPOpts), Diags(&diags), LangOpts(opts), Target(0),
FileMgr(Headers.getFileMgr()), SourceMgr(SM), HeaderInfo(Headers),
TheModuleLoader(TheModuleLoader), ExternalSource(0),
TheModuleLoader(TheModuleLoader), ExternalSource(nullptr),
Identifiers(opts, IILookup), IncrementalProcessing(false), TUKind(TUKind),
CodeComplete(0), CodeCompletionFile(0), CodeCompletionOffset(0),
LastTokenWasAt(false), ModuleImportExpectsIdentifier(false),
CodeCompletionReached(0), SkipMainFilePreamble(0, true), CurPPLexer(0),
CurDirLookup(0), CurLexerKind(CLK_Lexer), CurSubmodule(0), Callbacks(0),
MacroArgCache(0), Record(0), MIChainHead(0), MICache(0),
DeserialMIChainHead(0) {
CodeComplete(nullptr), CodeCompletionFile(nullptr),
CodeCompletionOffset(0), LastTokenWasAt(false),
ModuleImportExpectsIdentifier(false), CodeCompletionReached(0),
SkipMainFilePreamble(0, true), CurPPLexer(nullptr),
CurDirLookup(nullptr), CurLexerKind(CLK_Lexer), CurSubmodule(nullptr),
Callbacks(nullptr), MacroArgCache(nullptr), Record(nullptr),
MIChainHead(nullptr), MICache(nullptr), DeserialMIChainHead(0) {
OwnsHeaderSearch = OwnsHeaders;

ScratchBuf = new ScratchBuffer(SourceMgr);
@@ -127,9 +128,11 @@ Preprocessor::Preprocessor(IntrusiveRefCntPtr<PreprocessorOptions> PPOpts,
Ident___abnormal_termination = getIdentifierInfo("__abnormal_termination");
Ident_AbnormalTermination = getIdentifierInfo("AbnormalTermination");
} else {
Ident__exception_info = Ident__exception_code = Ident__abnormal_termination = 0;
Ident___exception_info = Ident___exception_code = Ident___abnormal_termination = 0;
Ident_GetExceptionInfo = Ident_GetExceptionCode = Ident_AbnormalTermination = 0;
Ident__exception_info = Ident__exception_code = nullptr;
Ident__abnormal_termination = Ident___exception_info = nullptr;
Ident___exception_code = Ident___abnormal_termination = nullptr;
Ident_GetExceptionInfo = Ident_GetExceptionCode = nullptr;
Ident_AbnormalTermination = nullptr;
}
}
@@ -438,8 +441,8 @@ void Preprocessor::CreateString(StringRef Str, Token &Tok,

Module *Preprocessor::getCurrentModule() {
if (getLangOpts().CurrentModule.empty())
return 0;

return nullptr;

return getHeaderSearchInfo().lookupModule(getLangOpts().CurrentModule);
}
@@ -461,8 +464,8 @@ void Preprocessor::EnterMainSourceFile() {
// a main file.
if (!SourceMgr.isLoadedFileID(MainFileID)) {
// Enter the main file source buffer.
EnterSourceFile(MainFileID, 0, SourceLocation());

EnterSourceFile(MainFileID, nullptr, SourceLocation());

// If we've been asked to skip bytes in the main file (e.g., as part of a
// precompiled preamble), do so now.
if (SkipMainFilePreamble.first > 0)
@@ -484,7 +487,7 @@ void Preprocessor::EnterMainSourceFile() {
setPredefinesFileID(FID);

// Start parsing the predefines.
EnterSourceFile(FID, 0, SourceLocation());
EnterSourceFile(FID, nullptr, SourceLocation());
}

void Preprocessor::EndSourceFile() {
@@ -630,7 +633,7 @@ bool Preprocessor::HandleIdentifier(Token &Identifier) {
// then we act as if it is the actual operator and not the textual
// representation of it.
if (II.isCPlusPlusOperatorKeyword())
Identifier.setIdentifierInfo(0);
Identifier.setIdentifierInfo(nullptr);

// If this is an extension token, diagnose its use.
// We avoid diagnosing tokens that originate from macro definitions.
@@ -21,7 +21,8 @@ using namespace clang;
//than a page, almost certainly enough for anything. :)
static const unsigned ScratchBufSize = 4060;

ScratchBuffer::ScratchBuffer(SourceManager &SM) : SourceMgr(SM), CurBuffer(0) {
ScratchBuffer::ScratchBuffer(SourceManager &SM)
: SourceMgr(SM), CurBuffer(nullptr) {
// Set BytesUsed so that the first call to getToken will require an alloc.
BytesUsed = ScratchBufSize;
}
@@ -86,8 +86,8 @@ void TokenLexer::Init(const Token *TokArray, unsigned NumToks,
// associated with it.
destroy();

Macro = 0;
ActualArgs = 0;
Macro = nullptr;
ActualArgs = nullptr;
Tokens = TokArray;
OwnsTokens = ownsTokens;
DisableMacroExpansion = disableMacroExpansion;
@@ -113,7 +113,7 @@ void TokenLexer::destroy() {
// the expanded tokens.
if (OwnsTokens) {
delete [] Tokens;
Tokens = 0;
Tokens = nullptr;
OwnsTokens = false;
}
@@ -480,7 +480,7 @@ bool TokenLexer::Lex(Token &Tok) {
HasLeadingSpace = false;

// Handle recursive expansion!
if (!Tok.isAnnotation() && Tok.getIdentifierInfo() != 0) {
if (!Tok.isAnnotation() && Tok.getIdentifierInfo() != nullptr) {
// Change the kind of this identifier to the appropriate token kind, e.g.
// turning "for" into a keyword.
IdentifierInfo *II = Tok.getIdentifierInfo();
@@ -507,7 +507,7 @@ bool TokenLexer::Lex(Token &Tok) {
/// If this returns true, the caller should immediately return the token.
bool TokenLexer::PasteTokens(Token &Tok) {
SmallString<128> Buffer;
const char *ResultTokStrPtr = 0;
const char *ResultTokStrPtr = nullptr;
SourceLocation StartLoc = Tok.getLocation();
SourceLocation PasteOpLoc;
do {