Revert "[AArch64] Improve TargetParser API"

Buildbots unhappy about constexpr function.

This reverts commit 450de8008b.
Tomas Matheson 2022-12-01 13:06:54 +00:00
parent 450de8008b
commit d1ef4b0a8d
9 changed files with 505 additions and 426 deletions
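For background on the one-line message above: the reverted change made the VersionTuple constructors and parts of the new ArchInfo API constexpr (see the VersionTuple.h and AArch64TargetParser.h hunks below). The following is a minimal standalone sketch of that kind of construct, an illustration only and not the LLVM sources; which host toolchain on the buildbots rejected it is not recorded in this commit.

// Standalone illustration, not the LLVM sources: a constexpr member function
// returning a trimmed string view, similar in spirit to the reverted
// ArchInfo::getSubArch(), which called StringRef::substr in a constexpr
// context. Whether this is accepted in constant expressions depends on the
// host compiler's constexpr support, which is presumably what the buildbots
// objected to.
#include <string_view>

struct ArchInfoSketch {
  std::string_view ArchFeature; // e.g. "+v8.1a"
  // Drop the leading '+' of the -mattr feature name.
  constexpr std::string_view getSubArch() const { return ArchFeature.substr(1); }
};

// Forces constant evaluation; older toolchains may reject this.
static_assert(ArchInfoSketch{"+v8.1a"}.getSubArch() == "v8.1a");

int main() { return 0; }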


@@ -45,6 +45,28 @@ const Builtin::Info AArch64TargetInfo::BuiltinInfo[] = {
 #include "clang/Basic/BuiltinsAArch64.def"
 };

+static StringRef getArchVersionString(llvm::AArch64::ArchKind Kind) {
+  switch (Kind) {
+  case llvm::AArch64::ArchKind::ARMV9A:
+  case llvm::AArch64::ArchKind::ARMV9_1A:
+  case llvm::AArch64::ArchKind::ARMV9_2A:
+  case llvm::AArch64::ArchKind::ARMV9_3A:
+  case llvm::AArch64::ArchKind::ARMV9_4A:
+    return "9";
+  default:
+    return "8";
+  }
+}
+
+StringRef AArch64TargetInfo::getArchProfile() const {
+  switch (ArchKind) {
+  case llvm::AArch64::ArchKind::ARMV8R:
+    return "R";
+  default:
+    return "A";
+  }
+}
+
 AArch64TargetInfo::AArch64TargetInfo(const llvm::Triple &Triple,
                                      const TargetOptions &Opts)
     : TargetInfo(Triple), ABI("aapcs") {
@@ -148,7 +170,7 @@ bool AArch64TargetInfo::validateBranchProtection(StringRef Spec, StringRef,

 bool AArch64TargetInfo::isValidCPUName(StringRef Name) const {
   return Name == "generic" ||
-         llvm::AArch64::parseCpu(Name).Arch != llvm::AArch64::INVALID;
+         llvm::AArch64::parseCPUArch(Name) != llvm::AArch64::ArchKind::INVALID;
 }

 bool AArch64TargetInfo::setCPU(const std::string &Name) {
@@ -276,10 +298,8 @@ void AArch64TargetInfo::getTargetDefines(const LangOptions &Opts,

   // ACLE predefines. Many can only have one possible value on v8 AArch64.
   Builder.defineMacro("__ARM_ACLE", "200");
-  Builder.defineMacro("__ARM_ARCH",
-                      std::to_string(ArchInfo->Version.getMajor()));
-  Builder.defineMacro("__ARM_ARCH_PROFILE",
-                      std::string("'") + (char)ArchInfo->Profile + "'");
+  Builder.defineMacro("__ARM_ARCH", getArchVersionString(ArchKind));
+  Builder.defineMacro("__ARM_ARCH_PROFILE", "'" + getArchProfile() + "'");

   Builder.defineMacro("__ARM_64BIT_STATE", "1");
   Builder.defineMacro("__ARM_PCS_AAPCS64", "1");
@@ -444,34 +464,52 @@ void AArch64TargetInfo::getTargetDefines(const LangOptions &Opts,
   if (HasD128)
     Builder.defineMacro("__ARM_FEATURE_SYSREG128", "1");

-  if (*ArchInfo == llvm::AArch64::ARMV8_1A)
+  switch (ArchKind) {
+  default:
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_1A:
     getTargetDefinesARMV81A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_2A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_2A:
     getTargetDefinesARMV82A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_3A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_3A:
     getTargetDefinesARMV83A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_4A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_4A:
     getTargetDefinesARMV84A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_5A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_5A:
     getTargetDefinesARMV85A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_6A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_6A:
     getTargetDefinesARMV86A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_7A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_7A:
     getTargetDefinesARMV87A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_8A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_8A:
     getTargetDefinesARMV88A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV8_9A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV8_9A:
     getTargetDefinesARMV89A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV9A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV9A:
     getTargetDefinesARMV9A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV9_1A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV9_1A:
     getTargetDefinesARMV91A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV9_2A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV9_2A:
     getTargetDefinesARMV92A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV9_3A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV9_3A:
     getTargetDefinesARMV93A(Opts, Builder);
-  if (*ArchInfo == llvm::AArch64::ARMV9_4A)
+    break;
+  case llvm::AArch64::ArchKind::ARMV9_4A:
     getTargetDefinesARMV94A(Opts, Builder);
+    break;
+  }

   // All of the __sync_(bool|val)_compare_and_swap_(1|2|4|8) builtins work.
   Builder.defineMacro("__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1");
@@ -521,17 +559,17 @@ bool AArch64TargetInfo::hasFeature(StringRef Feature) const {

 void AArch64TargetInfo::setFeatureEnabled(llvm::StringMap<bool> &Features,
                                           StringRef Name, bool Enabled) const {
   Features[Name] = Enabled;
-  // If this "feature" is an architecture, also add features for all previous
-  // architecture versions. In case of v9.x the v8.x counterparts are added too.
-  const llvm::AArch64::ArchInfo &ArchInfo =
-      llvm::AArch64::ArchInfo::findBySubArch(Name);
-
-  if (ArchInfo == llvm::AArch64::INVALID)
-    return; // Not an architecure, nothing more to do.
-
-  for (const auto *OtherArch : llvm::AArch64::ArchInfos)
-    if (ArchInfo.implies(*OtherArch))
-      Features[OtherArch->getSubArch()] = Enabled;
+  llvm::AArch64::ArchKind AK = llvm::AArch64::getSubArchArchKind(Name);
+  // Add all previous architecture versions.
+  // In case of v9.x the v8.x counterparts are added too.
+  if ("9" == getArchVersionString(AK))
+    for (llvm::AArch64::ArchKind I = llvm::AArch64::convertV9toV8(AK);
+         I != llvm::AArch64::ArchKind::INVALID; --I)
+      Features[llvm::AArch64::getSubArch(I)] = Enabled;
+
+  for (llvm::AArch64::ArchKind I = --AK; I != llvm::AArch64::ArchKind::INVALID;
+       --I)
+    Features[llvm::AArch64::getSubArch(I)] = Enabled;
 }
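The loops above rely on an operator-- that the restored header (see the AArch64TargetParser.h hunk further down) defines on ArchKind: it steps backwards through the .def ordering and stops at the start of each family (v8a, v9a, v8r); for v9.x features the clang code additionally walks the v8.x counterparts via convertV9toV8. A standalone sketch of that predecessor walk follows; the enum and names are local stand-ins that mirror the ordering in AArch64TargetParser.def, not the LLVM headers themselves.

// Standalone sketch, not the LLVM sources: mirror the ArchKind ordering from
// AArch64TargetParser.def and the decrement operator from the restored header,
// then walk all predecessor architectures the way
// AArch64TargetInfo::setFeatureEnabled does above.
#include <cstdio>

enum class ArchKind : unsigned {
  INVALID, ARMV8A, ARMV8_1A, ARMV8_2A, ARMV8_3A, ARMV8_4A, ARMV8_5A,
  ARMV8_6A, ARMV8_7A, ARMV8_8A, ARMV8_9A,
  ARMV9A, ARMV9_1A, ARMV9_2A, ARMV9_3A, ARMV9_4A, ARMV8R
};

// Decrement stops at the first member of each family, as in the reverted code.
static ArchKind &operator--(ArchKind &Kind) {
  if (Kind == ArchKind::INVALID || Kind == ArchKind::ARMV8A ||
      Kind == ArchKind::ARMV9A || Kind == ArchKind::ARMV8R)
    Kind = ArchKind::INVALID;
  else
    Kind = static_cast<ArchKind>(static_cast<unsigned>(Kind) - 1);
  return Kind;
}

int main() {
  static const char *Names[] = {"invalid", "v8a",   "v8.1a", "v8.2a", "v8.3a",
                                "v8.4a",   "v8.5a", "v8.6a", "v8.7a", "v8.8a",
                                "v8.9a",   "v9a",   "v9.1a", "v9.2a", "v9.3a",
                                "v9.4a",   "v8r"};
  // Enabling "+v8.3a" also enables v8.2a, v8.1a and v8a: prints those three.
  ArchKind AK = ArchKind::ARMV8_3A;
  for (ArchKind I = --AK; I != ArchKind::INVALID; --I)
    std::printf("%s\n", Names[static_cast<unsigned>(I)]);
  return 0;
}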
bool AArch64TargetInfo::handleTargetFeatures(std::vector<std::string> &Features, bool AArch64TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
@@ -564,6 +602,8 @@ bool AArch64TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
   HasD128 = false;
   HasRCPC = false;

+  ArchKind = llvm::AArch64::ArchKind::INVALID;
+
   for (const auto &Feature : Features) {
     if (Feature == "+neon")
       FPU |= NeonMode;
@@ -625,51 +665,38 @@ bool AArch64TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
     if (Feature == "+strict-align")
       HasUnaligned = false;
     // All predecessor archs are added but select the latest one for ArchKind.
-    if (Feature == "+v8a" && ArchInfo->Version < llvm::AArch64::ARMV8A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8A;
-    if (Feature == "+v8.1a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_1A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_1A;
-    if (Feature == "+v8.2a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_2A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_2A;
-    if (Feature == "+v8.3a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_3A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_3A;
-    if (Feature == "+v8.4a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_4A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_4A;
-    if (Feature == "+v8.5a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_5A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_5A;
-    if (Feature == "+v8.6a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_6A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_6A;
-    if (Feature == "+v8.7a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_7A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_7A;
-    if (Feature == "+v8.8a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_8A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_8A;
-    if (Feature == "+v8.9a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV8_9A.Version)
-      ArchInfo = &llvm::AArch64::ARMV8_9A;
-    if (Feature == "+v9a" && ArchInfo->Version < llvm::AArch64::ARMV9A.Version)
-      ArchInfo = &llvm::AArch64::ARMV9A;
-    if (Feature == "+v9.1a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV9_1A.Version)
-      ArchInfo = &llvm::AArch64::ARMV9_1A;
-    if (Feature == "+v9.2a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV9_2A.Version)
-      ArchInfo = &llvm::AArch64::ARMV9_2A;
-    if (Feature == "+v9.3a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV9_3A.Version)
-      ArchInfo = &llvm::AArch64::ARMV9_3A;
-    if (Feature == "+v9.4a" &&
-        ArchInfo->Version < llvm::AArch64::ARMV9_4A.Version)
-      ArchInfo = &llvm::AArch64::ARMV9_4A;
+    if (Feature == "+v8a" && ArchKind < llvm::AArch64::ArchKind::ARMV8A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8A;
+    if (Feature == "+v8.1a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_1A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_1A;
+    if (Feature == "+v8.2a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_2A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_2A;
+    if (Feature == "+v8.3a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_3A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_3A;
+    if (Feature == "+v8.4a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_4A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_4A;
+    if (Feature == "+v8.5a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_5A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_5A;
+    if (Feature == "+v8.6a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_6A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_6A;
+    if (Feature == "+v8.7a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_7A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_7A;
+    if (Feature == "+v8.8a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_8A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_8A;
+    if (Feature == "+v8.9a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_9A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV8_9A;
+    if (Feature == "+v9a" && ArchKind < llvm::AArch64::ArchKind::ARMV9A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV9A;
+    if (Feature == "+v9.1a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_1A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV9_1A;
+    if (Feature == "+v9.2a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_2A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV9_2A;
+    if (Feature == "+v9.3a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_3A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV9_3A;
+    if (Feature == "+v9.4a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_4A)
+      ArchKind = llvm::AArch64::ArchKind::ARMV9_4A;
     if (Feature == "+v8r")
-      ArchInfo = &llvm::AArch64::ARMV8R;
+      ArchKind = llvm::AArch64::ArchKind::ARMV8R;
     if (Feature == "+fullfp16")
       HasFullFP16 = true;
     if (Feature == "+dotprod")
@@ -717,8 +744,8 @@ bool AArch64TargetInfo::initFeatureMap(
     llvm::StringMap<bool> &Features, DiagnosticsEngine &Diags, StringRef CPU,
     const std::vector<std::string> &FeaturesVec) const {
   // Parse the CPU and add any implied features.
-  const llvm::AArch64::ArchInfo &Arch = llvm::AArch64::parseCpu(CPU).Arch;
-  if (Arch != llvm::AArch64::INVALID) {
+  llvm::AArch64::ArchKind Arch = llvm::AArch64::parseCPUArch(CPU);
+  if (Arch != llvm::AArch64::ArchKind::INVALID) {
     uint64_t Exts = llvm::AArch64::getDefaultExtensions(CPU, Arch);
     std::vector<StringRef> CPUFeats;
     llvm::AArch64::getExtensionFeatures(Exts, CPUFeats);
@@ -779,13 +806,13 @@ ParsedTargetAttr AArch64TargetInfo::parseTargetAttr(StringRef Features) const {
       FoundArch = true;
       std::pair<StringRef, StringRef> Split =
          Feature.split("=").second.trim().split("+");
-      const llvm::AArch64::ArchInfo &AI = llvm::AArch64::parseArch(Split.first);
+      llvm::AArch64::ArchKind ArchKind = llvm::AArch64::parseArch(Split.first);
      // Parse the architecture version, adding the required features to
      // Ret.Features.
-      if (AI == llvm::AArch64::INVALID)
+      if (ArchKind == llvm::AArch64::ArchKind::INVALID)
        continue;
-      Ret.Features.push_back(AI.ArchFeature.str());
+      Ret.Features.push_back(llvm::AArch64::getArchFeature(ArchKind).str());

      // Add any extra features, after the +
      SplitAndAddFeatures(Split.second, Ret.Features);
    } else if (Feature.startswith("cpu=")) {


@@ -56,11 +56,12 @@ class LLVM_LIBRARY_VISIBILITY AArch64TargetInfo : public TargetInfo {
   bool HasD128;
   bool HasRCPC;

-  const llvm::AArch64::ArchInfo *ArchInfo = &llvm::AArch64::ARMV8A;
+  llvm::AArch64::ArchKind ArchKind;

   static const Builtin::Info BuiltinInfo[];

   std::string ABI;

+  StringRef getArchProfile() const;
+
 public:
   AArch64TargetInfo(const llvm::Triple &Triple, const TargetOptions &Opts);


@@ -70,7 +70,7 @@ std::string aarch64::getAArch64TargetCPU(const ArgList &Args,

 // Decode AArch64 features from string like +[no]featureA+[no]featureB+...
 static bool DecodeAArch64Features(const Driver &D, StringRef text,
                                   std::vector<StringRef> &Features,
-                                  const llvm::AArch64::ArchInfo &ArchInfo) {
+                                  llvm::AArch64::ArchKind ArchKind) {
   SmallVector<StringRef, 8> Split;
   text.split(Split, StringRef("+"), -1, false);
@@ -104,14 +104,14 @@ static bool DecodeAArch64Features(const Driver &D, StringRef text,

     // +sve implies +f32mm if the base architecture is >= v8.6A (except v9A)
     // It isn't the case in general that sve implies both f64mm and f32mm
-    if ((ArchInfo == llvm::AArch64::ARMV8_6A ||
-         ArchInfo == llvm::AArch64::ARMV8_7A ||
-         ArchInfo == llvm::AArch64::ARMV8_8A ||
-         ArchInfo == llvm::AArch64::ARMV8_9A ||
-         ArchInfo == llvm::AArch64::ARMV9_1A ||
-         ArchInfo == llvm::AArch64::ARMV9_2A ||
-         ArchInfo == llvm::AArch64::ARMV9_3A ||
-         ArchInfo == llvm::AArch64::ARMV9_4A) &&
+    if ((ArchKind == llvm::AArch64::ArchKind::ARMV8_6A ||
+         ArchKind == llvm::AArch64::ArchKind::ARMV8_7A ||
+         ArchKind == llvm::AArch64::ArchKind::ARMV8_8A ||
+         ArchKind == llvm::AArch64::ArchKind::ARMV8_9A ||
+         ArchKind == llvm::AArch64::ArchKind::ARMV9_1A ||
+         ArchKind == llvm::AArch64::ArchKind::ARMV9_2A ||
+         ArchKind == llvm::AArch64::ArchKind::ARMV9_3A ||
+         ArchKind == llvm::AArch64::ArchKind::ARMV9_4A) &&
         Feature == "sve")
       Features.push_back("+f32mm");
   }
@@ -123,8 +123,10 @@ static bool DecodeAArch64Features(const Driver &D, StringRef text,

 static bool DecodeAArch64Mcpu(const Driver &D, StringRef Mcpu, StringRef &CPU,
                               std::vector<StringRef> &Features) {
   std::pair<StringRef, StringRef> Split = Mcpu.split("+");
-  const llvm::AArch64::ArchInfo *ArchInfo = &llvm::AArch64::ARMV8A;
-  CPU = llvm::AArch64::resolveCPUAlias(Split.first);
+  CPU = Split.first;
+  llvm::AArch64::ArchKind ArchKind = llvm::AArch64::ArchKind::ARMV8A;
+  CPU = llvm::AArch64::resolveCPUAlias(CPU);
+
   if (CPU == "native")
     CPU = llvm::sys::getHostCPUName();
@@ -132,21 +134,21 @@ static bool DecodeAArch64Mcpu(const Driver &D, StringRef Mcpu, StringRef &CPU,
   if (CPU == "generic") {
     Features.push_back("+neon");
   } else {
-    ArchInfo = &llvm::AArch64::parseCpu(CPU).Arch;
-    if (*ArchInfo == llvm::AArch64::INVALID)
+    ArchKind = llvm::AArch64::parseCPUArch(CPU);
+    if (ArchKind == llvm::AArch64::ArchKind::INVALID)
       return false;
-    Features.push_back(ArchInfo->ArchFeature);
+    Features.push_back(llvm::AArch64::getArchFeature(ArchKind));

-    uint64_t Extension = llvm::AArch64::getDefaultExtensions(CPU, *ArchInfo);
+    uint64_t Extension = llvm::AArch64::getDefaultExtensions(CPU, ArchKind);
     if (!llvm::AArch64::getExtensionFeatures(Extension, Features))
       return false;
   }

   if (Split.second.size() &&
-      !DecodeAArch64Features(D, Split.second, Features, *ArchInfo))
+      !DecodeAArch64Features(D, Split.second, Features, ArchKind))
     return false;

   return true;
 }

 static bool
@@ -156,26 +158,25 @@ getAArch64ArchFeaturesFromMarch(const Driver &D, StringRef March,
   std::string MarchLowerCase = March.lower();
   std::pair<StringRef, StringRef> Split = StringRef(MarchLowerCase).split("+");

-  const llvm::AArch64::ArchInfo *ArchInfo =
-      &llvm::AArch64::parseArch(Split.first);
+  llvm::AArch64::ArchKind ArchKind = llvm::AArch64::parseArch(Split.first);
   if (Split.first == "native")
-    ArchInfo = &llvm::AArch64::getArchForCpu(llvm::sys::getHostCPUName().str());
+    ArchKind = llvm::AArch64::getCPUArchKind(llvm::sys::getHostCPUName().str());

-  if (*ArchInfo == llvm::AArch64::INVALID)
+  if (ArchKind == llvm::AArch64::ArchKind::INVALID)
     return false;
-  Features.push_back(ArchInfo->ArchFeature);
+  Features.push_back(llvm::AArch64::getArchFeature(ArchKind));

   // Enable SVE2 by default on Armv9-A.
   // It can still be disabled if +nosve2 is present.
   // We must do this early so that DecodeAArch64Features has the correct state
-  if ((*ArchInfo == llvm::AArch64::ARMV9A ||
-       *ArchInfo == llvm::AArch64::ARMV9_1A ||
-       *ArchInfo == llvm::AArch64::ARMV9_2A)) {
+  if ((ArchKind == llvm::AArch64::ArchKind::ARMV9A ||
+       ArchKind == llvm::AArch64::ArchKind::ARMV9_1A ||
+       ArchKind == llvm::AArch64::ArchKind::ARMV9_2A)) {
     Features.push_back("+sve");
     Features.push_back("+sve2");
   }

   if ((Split.second.size() &&
-       !DecodeAArch64Features(D, Split.second, Features, *ArchInfo)))
+       !DecodeAArch64Features(D, Split.second, Features, ArchKind)))
     return false;

   return true;


@@ -13,85 +13,85 @@
// NOTE: NO INCLUDE GUARD DESIRED! // NOTE: NO INCLUDE GUARD DESIRED!
#ifndef AARCH64_ARCH #ifndef AARCH64_ARCH
#define AARCH64_ARCH(MAJOR, MINOR, PROFILE, NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT) #define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT)
#endif #endif
// NOTE: The order and the grouping of the elements matter to make ArchKind iterable. // NOTE: The order and the grouping of the elements matter to make ArchKind iterable.
// List is organised as armv8a -> armv8n-a, armv9a -> armv9m-a and armv8-r. // List is organised as armv8a -> armv8n-a, armv9a -> armv9m-a and armv8-r.
AARCH64_ARCH(0, 0, InvalidProfile, "invalid", INVALID, "+", AARCH64_ARCH("invalid", INVALID, "+",
AArch64::AEK_NONE) AArch64::AEK_NONE)
AARCH64_ARCH(8, 0, AProfile, "armv8-a", ARMV8A, "+v8a", AARCH64_ARCH("armv8-a", ARMV8A, "+v8a",
(AArch64::AEK_CRYPTO | AArch64::AEK_FP | AArch64::AEK_SIMD)) (AArch64::AEK_CRYPTO | AArch64::AEK_FP | AArch64::AEK_SIMD))
AARCH64_ARCH(8, 1, AProfile, "armv8.1-a", ARMV8_1A, "+v8.1a", AARCH64_ARCH("armv8.1-a", ARMV8_1A, "+v8.1a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_LSE | AArch64::AEK_RDM)) AArch64::AEK_SIMD | AArch64::AEK_LSE | AArch64::AEK_RDM))
AARCH64_ARCH(8, 2, AProfile, "armv8.2-a", ARMV8_2A, "+v8.2a", AARCH64_ARCH("armv8.2-a", ARMV8_2A, "+v8.2a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM)) AArch64::AEK_RDM))
AARCH64_ARCH(8, 3, AProfile, "armv8.3-a", ARMV8_3A, "+v8.3a", AARCH64_ARCH("armv8.3-a", ARMV8_3A, "+v8.3a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC)) AArch64::AEK_RDM | AArch64::AEK_RCPC))
AARCH64_ARCH(8, 4, AProfile, "armv8.4-a", ARMV8_4A, "+v8.4a", AARCH64_ARCH("armv8.4-a", ARMV8_4A, "+v8.4a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD)) AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD))
AARCH64_ARCH(8, 5, AProfile, "armv8.5-a", ARMV8_5A, "+v8.5a", AARCH64_ARCH("armv8.5-a", ARMV8_5A, "+v8.5a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD)) AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD))
AARCH64_ARCH(8, 6, AProfile, "armv8.6-a", ARMV8_6A, "+v8.6a", AARCH64_ARCH("armv8.6-a", ARMV8_6A, "+v8.6a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 | AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM)) AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH(8, 7, AProfile, "armv8.7-a", ARMV8_7A, "+v8.7a", AARCH64_ARCH("armv8.7-a", ARMV8_7A, "+v8.7a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 | AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM)) AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH(8, 8, AProfile, "armv8.8-a", ARMV8_8A, "+v8.8a", AARCH64_ARCH("armv8.8-a", ARMV8_8A, "+v8.8a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 | AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM)) AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH(8, 9, AProfile, "armv8.9-a", ARMV8_9A, "+v8.9a", AARCH64_ARCH("armv8.9-a", ARMV8_9A, "+v8.9a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 | AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM)) AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH(9, 0, AProfile, "armv9-a", ARMV9A, "+v9a", AARCH64_ARCH("armv9-a", ARMV9A, "+v9a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SVE2)) AArch64::AEK_SVE2))
AARCH64_ARCH(9, 1, AProfile, "armv9.1-a", ARMV9_1A, "+v9.1a", AARCH64_ARCH("armv9.1-a", ARMV9_1A, "+v9.1a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2)) AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
AARCH64_ARCH(9, 2, AProfile, "armv9.2-a", ARMV9_2A, "+v9.2a", AARCH64_ARCH("armv9.2-a", ARMV9_2A, "+v9.2a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2)) AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
AARCH64_ARCH(9, 3, AProfile, "armv9.3-a", ARMV9_3A, "+v9.3a", AARCH64_ARCH("armv9.3-a", ARMV9_3A, "+v9.3a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2)) AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
AARCH64_ARCH(9, 4, AProfile, "armv9.4-a", ARMV9_4A, "+v9.4a", AARCH64_ARCH("armv9.4-a", ARMV9_4A, "+v9.4a",
(AArch64::AEK_CRC | AArch64::AEK_FP | (AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE | AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD | AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2)) AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
// For v8-R, we do not enable crypto and align with GCC that enables a more // For v8-R, we do not enable crypto and align with GCC that enables a more
// minimal set of optional architecture extensions. // minimal set of optional architecture extensions.
AARCH64_ARCH(8, 0, RProfile, "armv8-r", ARMV8R, "+v8r", AARCH64_ARCH("armv8-r", ARMV8R, "+v8r",
(AArch64::AEK_CRC | AArch64::AEK_RDM | AArch64::AEK_SSBS | (AArch64::AEK_CRC | AArch64::AEK_RDM | AArch64::AEK_SSBS |
AArch64::AEK_DOTPROD | AArch64::AEK_FP | AArch64::AEK_SIMD | AArch64::AEK_DOTPROD | AArch64::AEK_FP | AArch64::AEK_SIMD |
AArch64::AEK_FP16 | AArch64::AEK_FP16FML | AArch64::AEK_RAS | AArch64::AEK_FP16 | AArch64::AEK_FP16FML | AArch64::AEK_RAS |
@@ -101,6 +101,7 @@ AARCH64_ARCH(8, 0, RProfile, "armv8-r", ARMV8R, "+v8r",
#ifndef AARCH64_ARCH_EXT_NAME #ifndef AARCH64_ARCH_EXT_NAME
#define AARCH64_ARCH_EXT_NAME(NAME, ID, FEATURE, NEGFEATURE) #define AARCH64_ARCH_EXT_NAME(NAME, ID, FEATURE, NEGFEATURE)
#endif #endif
// FIXME: This would be nicer were it tablegen
AARCH64_ARCH_EXT_NAME("invalid", AArch64::AEK_INVALID, {}, {}) AARCH64_ARCH_EXT_NAME("invalid", AArch64::AEK_INVALID, {}, {})
AARCH64_ARCH_EXT_NAME("none", AArch64::AEK_NONE, {}, {}) AARCH64_ARCH_EXT_NAME("none", AArch64::AEK_NONE, {}, {})
AARCH64_ARCH_EXT_NAME("crc", AArch64::AEK_CRC, "+crc", "-crc") AARCH64_ARCH_EXT_NAME("crc", AArch64::AEK_CRC, "+crc", "-crc")


@@ -15,9 +15,9 @@
#define LLVM_SUPPORT_AARCH64TARGETPARSER_H #define LLVM_SUPPORT_AARCH64TARGETPARSER_H
#include "llvm/ADT/StringRef.h" #include "llvm/ADT/StringRef.h"
#include "llvm/Support/VersionTuple.h"
#include <vector> #include <vector>
// FIXME:This should be made into class design,to avoid dupplication.
namespace llvm { namespace llvm {
class Triple; class Triple;
@@ -83,129 +83,101 @@ enum ArchExtKind : uint64_t {
AEK_LSE128 = 1ULL << 52, // FEAT_LSE128 AEK_LSE128 = 1ULL << 52, // FEAT_LSE128
}; };
// Represents an extension that can be enabled with -march=<arch>+<extension>. enum class ArchKind {
// Typically these correspond to Arm Architecture extensions, unlike #define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT) ID,
// SubtargetFeature which may represent either an actual extension or some #include "AArch64TargetParser.def"
// internal LLVM property.
struct ExtensionInfo {
StringRef Name; // Human readable name, e.g. "profile".
ArchExtKind ID; // Corresponding to the ArchExtKind, this extensions
// representation in the bitfield.
StringRef Feature; // -mattr enable string, e.g. "+spe"
StringRef NegFeature; // -mattr disable string, e.g. "-spe"
}; };
inline constexpr ExtensionInfo Extensions[] = { struct ArchNames {
StringRef Name;
StringRef ArchFeature;
uint64_t ArchBaseExtensions;
ArchKind ID;
// Return ArchFeature without the leading "+".
StringRef getSubArch() const { return ArchFeature.substr(1); }
};
const ArchNames AArch64ARCHNames[] = {
#define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT) \
{NAME, ARCH_FEATURE, ARCH_BASE_EXT, AArch64::ArchKind::ID},
#include "AArch64TargetParser.def"
};
// List of Arch Extension names.
struct ExtName {
StringRef Name;
uint64_t ID;
StringRef Feature;
StringRef NegFeature;
};
const ExtName AArch64ARCHExtNames[] = {
#define AARCH64_ARCH_EXT_NAME(NAME, ID, FEATURE, NEGFEATURE) \ #define AARCH64_ARCH_EXT_NAME(NAME, ID, FEATURE, NEGFEATURE) \
{NAME, ID, FEATURE, NEGFEATURE}, {NAME, ID, FEATURE, NEGFEATURE},
#include "AArch64TargetParser.def" #include "AArch64TargetParser.def"
}; };
enum ArchProfile { AProfile = 'A', RProfile = 'R', InvalidProfile = '?' }; // List of CPU names and their arches.
// The same CPU can have multiple arches and can be default on multiple arches.
// Information about a specific architecture, e.g. V8.1-A // When finding the Arch for a CPU, first-found prevails. Sort them accordingly.
struct ArchInfo { // When this becomes table-generated, we'd probably need two tables.
VersionTuple Version; // Architecture version, major + minor. struct CpuNames {
ArchProfile Profile; // Architecuture profile StringRef Name;
StringRef Name; // Human readable name, e.g. "armv8.1-a" ArchKind ArchID;
StringRef ArchFeature; // Command line feature flag, e.g. +v8a
uint64_t DefaultExts; // bitfield of default extensions ArchExtKind
// These are not intended to be copied or created outside of this file.
ArchInfo(const ArchInfo &) = delete;
ArchInfo(const ArchInfo &&) = delete;
ArchInfo &operator=(const ArchInfo &rhs) = delete;
ArchInfo &&operator=(const ArchInfo &&rhs) = delete;
// Comparison is done by address. Copies should not exist.
bool operator==(const ArchInfo &Other) const { return this == &Other; }
bool operator!=(const ArchInfo &Other) const { return this != &Other; }
// Defines the following partial order, indicating when an architecture is
// a superset of another:
//
// v9.4a > v9.3a > v9.3a > v9.3a > v9a;
// v v v v v
// v8.9a > v8.8a > v8.7a > v8.6a > v8.5a > v8.4a > ... > v8a;
//
// v8r and INVALID have no relation to anything. This is used to
// determine which features to enable for a given architecture. See
// AArch64TargetInfo::setFeatureEnabled.
bool implies(const ArchInfo &Other) const {
if (this->Profile != Other.Profile)
return false; // ARMV8R and INVALID
if (this->Version.getMajor() == Other.Version.getMajor()) {
return this->Version > Other.Version;
}
if (this->Version.getMajor() == 9 && Other.Version.getMajor() == 8) {
return this->Version.getMinor().value() + 5 >=
Other.Version.getMinor().value();
}
return false;
}
// Return ArchFeature without the leading "+".
constexpr StringRef getSubArch() const { return ArchFeature.substr(1); }
// Search for ArchInfo by SubArch name
static const ArchInfo &findBySubArch(StringRef SubArch);
};
// Create ArchInfo structs named <ID>
#define AARCH64_ARCH(MAJOR, MINOR, PROFILE, NAME, ID, ARCH_FEATURE, \
ARCH_BASE_EXT) \
inline constexpr ArchInfo ID = {VersionTuple{MAJOR, MINOR}, PROFILE, NAME, \
ARCH_FEATURE, ARCH_BASE_EXT};
#include "AArch64TargetParser.def"
#undef AARCH64_ARCH
// The set of all architectures
inline constexpr std::array<const ArchInfo *, 17> ArchInfos = {
#define AARCH64_ARCH(MAJOR, MINOR, PROFILE, NAME, ID, ARCH_FEATURE, \
ARCH_BASE_EXT) \
&ID,
#include "AArch64TargetParser.def"
};
// Details of a specific CPU.
struct CpuInfo {
StringRef Name; // Name, as written for -mcpu.
const ArchInfo &Arch;
uint64_t DefaultExtensions; uint64_t DefaultExtensions;
}; };
inline constexpr CpuInfo CpuInfos[] = { const CpuNames AArch64CPUNames[] = {
#define AARCH64_CPU_NAME(NAME, ARCH_ID, DEFAULT_EXT) \ #define AARCH64_CPU_NAME(NAME, ID, DEFAULT_EXT) \
{NAME, ARCH_ID, DEFAULT_EXT}, {NAME, AArch64::ArchKind::ID, DEFAULT_EXT},
#include "AArch64TargetParser.def" #include "AArch64TargetParser.def"
}; };
// An alias for a CPU. const struct {
struct CpuAlias {
StringRef Alias; StringRef Alias;
StringRef Name; StringRef Name;
}; } AArch64CPUAliases[] = {
inline constexpr CpuAlias CpuAliases[] = {
#define AARCH64_CPU_ALIAS(ALIAS, NAME) {ALIAS, NAME}, #define AARCH64_CPU_ALIAS(ALIAS, NAME) {ALIAS, NAME},
#include "AArch64TargetParser.def" #include "AArch64TargetParser.def"
}; };
const ArchKind ArchKinds[] = {
#define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT) ArchKind::ID,
#include "AArch64TargetParser.def"
};
inline ArchKind &operator--(ArchKind &Kind) {
if ((Kind == ArchKind::INVALID) || (Kind == ArchKind::ARMV8A) ||
(Kind == ArchKind::ARMV9A) || (Kind == ArchKind::ARMV8R))
Kind = ArchKind::INVALID;
else {
unsigned KindAsInteger = static_cast<unsigned>(Kind);
Kind = static_cast<ArchKind>(--KindAsInteger);
}
return Kind;
}
bool getExtensionFeatures(uint64_t Extensions, bool getExtensionFeatures(uint64_t Extensions,
std::vector<StringRef> &Features); std::vector<StringRef> &Features);
StringRef getArchFeature(ArchKind AK);
StringRef getArchName(ArchKind AK);
StringRef getSubArch(ArchKind AK);
StringRef getArchExtName(unsigned ArchExtKind);
StringRef getArchExtFeature(StringRef ArchExt); StringRef getArchExtFeature(StringRef ArchExt);
ArchKind convertV9toV8(ArchKind AK);
StringRef resolveCPUAlias(StringRef CPU); StringRef resolveCPUAlias(StringRef CPU);
// Information by Name // Information by Name
uint64_t getDefaultExtensions(StringRef CPU, const ArchInfo &AI); uint64_t getDefaultExtensions(StringRef CPU, ArchKind AK);
const ArchInfo &getArchForCpu(StringRef CPU); ArchKind getCPUArchKind(StringRef CPU);
ArchKind getSubArchArchKind(StringRef SubArch);
// Parser // Parser
const ArchInfo &parseArch(StringRef Arch); ArchKind parseArch(StringRef Arch);
ArchExtKind parseArchExt(StringRef ArchExt); ArchExtKind parseArchExt(StringRef ArchExt);
// Given the name of a CPU or alias, return the correponding CpuInfo. ArchKind parseCPUArch(StringRef CPU);
const CpuInfo &parseCpu(StringRef Name);
// Used by target parser tests // Used by target parser tests
void fillValidCPUArchList(SmallVectorImpl<StringRef> &Values); void fillValidCPUArchList(SmallVectorImpl<StringRef> &Values);


@@ -41,25 +41,24 @@ class VersionTuple {
   unsigned HasBuild : 1;

 public:
-  constexpr VersionTuple()
+  VersionTuple()
       : Major(0), Minor(0), HasMinor(false), Subminor(0), HasSubminor(false),
         Build(0), HasBuild(false) {}

-  explicit constexpr VersionTuple(unsigned Major)
+  explicit VersionTuple(unsigned Major)
       : Major(Major), Minor(0), HasMinor(false), Subminor(0),
         HasSubminor(false), Build(0), HasBuild(false) {}

-  explicit constexpr VersionTuple(unsigned Major, unsigned Minor)
+  explicit VersionTuple(unsigned Major, unsigned Minor)
       : Major(Major), Minor(Minor), HasMinor(true), Subminor(0),
         HasSubminor(false), Build(0), HasBuild(false) {}

-  explicit constexpr VersionTuple(unsigned Major, unsigned Minor,
-                                  unsigned Subminor)
+  explicit VersionTuple(unsigned Major, unsigned Minor, unsigned Subminor)
       : Major(Major), Minor(Minor), HasMinor(true), Subminor(Subminor),
         HasSubminor(true), Build(0), HasBuild(false) {}

-  explicit constexpr VersionTuple(unsigned Major, unsigned Minor,
-                                  unsigned Subminor, unsigned Build)
+  explicit VersionTuple(unsigned Major, unsigned Minor, unsigned Subminor,
+                        unsigned Build)
       : Major(Major), Minor(Minor), HasMinor(true), Subminor(Subminor),
         HasSubminor(true), Build(Build), HasBuild(true) {}


@@ -25,33 +25,34 @@ static unsigned checkArchVersion(llvm::StringRef Arch) {
   return 0;
 }

-uint64_t AArch64::getDefaultExtensions(StringRef CPU,
-                                       const AArch64::ArchInfo &AI) {
+uint64_t AArch64::getDefaultExtensions(StringRef CPU, AArch64::ArchKind AK) {
   if (CPU == "generic")
-    return AI.DefaultExts;
+    return AArch64ARCHNames[static_cast<unsigned>(AK)].ArchBaseExtensions;

   return StringSwitch<uint64_t>(CPU)
-#define AARCH64_CPU_NAME(NAME, ARCH_ID, DEFAULT_EXT) \
-  .Case(NAME, ARCH_ID.DefaultExts | DEFAULT_EXT)
+#define AARCH64_CPU_NAME(NAME, ID, DEFAULT_EXT) \
+  .Case(NAME, AArch64ARCHNames[static_cast<unsigned>(ArchKind::ID)] \
+                      .ArchBaseExtensions | \
+                  DEFAULT_EXT)
 #include "../../include/llvm/Support/AArch64TargetParser.def"
       .Default(AArch64::AEK_INVALID);
 }

-const AArch64::ArchInfo &AArch64::getArchForCpu(StringRef CPU) {
+AArch64::ArchKind AArch64::getCPUArchKind(StringRef CPU) {
   if (CPU == "generic")
-    return ARMV8A;
+    return ArchKind::ARMV8A;

-  return *StringSwitch<const AArch64::ArchInfo *>(CPU)
-#define AARCH64_CPU_NAME(NAME, ARCH_ID, DEFAULT_EXT) .Case(NAME, &ARCH_ID)
+  return StringSwitch<AArch64::ArchKind>(CPU)
+#define AARCH64_CPU_NAME(NAME, ID, DEFAULT_EXT) .Case(NAME, ArchKind::ID)
 #include "../../include/llvm/Support/AArch64TargetParser.def"
-      .Default(&INVALID);
+      .Default(ArchKind::INVALID);
 }

-const AArch64::ArchInfo &AArch64::ArchInfo::findBySubArch(StringRef SubArch) {
-  for (const auto *A : AArch64::ArchInfos)
-    if (A->getSubArch() == SubArch)
-      return *A;
-  return AArch64::INVALID;
+AArch64::ArchKind AArch64::getSubArchArchKind(StringRef SubArch) {
+  for (const auto &A : AArch64ARCHNames)
+    if (A.getSubArch() == SubArch)
+      return A.ID;
+  return ArchKind::INVALID;
 }

 bool AArch64::getExtensionFeatures(uint64_t Extensions,
@@ -79,27 +80,53 @@ StringRef AArch64::resolveCPUAlias(StringRef CPU) {
       .Default(CPU);
 }

+StringRef AArch64::getArchFeature(AArch64::ArchKind AK) {
+  return AArch64ARCHNames[static_cast<unsigned>(AK)].ArchFeature;
+}
+
+StringRef AArch64::getArchName(AArch64::ArchKind AK) {
+  return AArch64ARCHNames[static_cast<unsigned>(AK)].Name;
+}
+
+StringRef AArch64::getSubArch(AArch64::ArchKind AK) {
+  return AArch64ARCHNames[static_cast<unsigned>(AK)].getSubArch();
+}
+
 StringRef AArch64::getArchExtFeature(StringRef ArchExt) {
   if (ArchExt.startswith("no")) {
     StringRef ArchExtBase(ArchExt.substr(2));
-    for (const auto &AE : Extensions) {
+    for (const auto &AE : AArch64ARCHExtNames) {
       if (!AE.NegFeature.empty() && ArchExtBase == AE.Name)
         return AE.NegFeature;
     }
   }

-  for (const auto &AE : Extensions)
+  for (const auto &AE : AArch64ARCHExtNames)
     if (!AE.Feature.empty() && ArchExt == AE.Name)
       return AE.Feature;
   return StringRef();
 }

-void AArch64::fillValidCPUArchList(SmallVectorImpl<StringRef> &Values) {
-  for (const auto &C : CpuInfos)
-    if (C.Arch != INVALID)
-      Values.push_back(C.Name);
+AArch64::ArchKind AArch64::convertV9toV8(AArch64::ArchKind AK) {
+  if (AK == AArch64::ArchKind::INVALID)
+    return AK;
+  if (AK < AArch64::ArchKind::ARMV9A)
+    return AK;
+  if (AK >= AArch64::ArchKind::ARMV8R)
+    return AArch64::ArchKind::INVALID;
+  unsigned AK_v8 = static_cast<unsigned>(AArch64::ArchKind::ARMV8_5A);
+  AK_v8 += static_cast<unsigned>(AK) -
+           static_cast<unsigned>(AArch64::ArchKind::ARMV9A);
+  return static_cast<AArch64::ArchKind>(AK_v8);
+}

-  for (const auto &Alias : CpuAliases)
+void AArch64::fillValidCPUArchList(SmallVectorImpl<StringRef> &Values) {
+  for (const auto &Arch : AArch64CPUNames) {
+    if (Arch.ArchID != ArchKind::INVALID)
+      Values.push_back(Arch.Name);
+  }
+
+  for (const auto &Alias: AArch64CPUAliases)
     Values.push_back(Alias.Alias);
 }
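As a quick standalone check of the index arithmetic in convertV9toV8() above: each Armv9.x level maps to its Armv8.(x+5) counterpart in the .def ordering. The enum below is a local stand-in for the real ArchKind, so treat this as an illustration rather than the LLVM API.

// Standalone sketch: reproduce the convertV9toV8 arithmetic with a local enum
// whose ordering mirrors AArch64TargetParser.def (v8a..v8.9a, v9a..v9.4a, v8r).
#include <cassert>

enum class ArchKind : unsigned {
  INVALID, ARMV8A, ARMV8_1A, ARMV8_2A, ARMV8_3A, ARMV8_4A, ARMV8_5A,
  ARMV8_6A, ARMV8_7A, ARMV8_8A, ARMV8_9A,
  ARMV9A, ARMV9_1A, ARMV9_2A, ARMV9_3A, ARMV9_4A, ARMV8R
};

static ArchKind convertV9toV8(ArchKind AK) {
  if (AK == ArchKind::INVALID || AK < ArchKind::ARMV9A)
    return AK;                // v8.x (and invalid) map to themselves
  if (AK >= ArchKind::ARMV8R)
    return ArchKind::INVALID; // v8-r has no v9 relation
  unsigned V8 = static_cast<unsigned>(ArchKind::ARMV8_5A) +
                (static_cast<unsigned>(AK) - static_cast<unsigned>(ArchKind::ARMV9A));
  return static_cast<ArchKind>(V8);
}

int main() {
  assert(convertV9toV8(ArchKind::ARMV9A) == ArchKind::ARMV8_5A);   // v9   -> v8.5
  assert(convertV9toV8(ArchKind::ARMV9_2A) == ArchKind::ARMV8_7A); // v9.2 -> v8.7
  assert(convertV9toV8(ArchKind::ARMV8R) == ArchKind::INVALID);
  return 0;
}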
@@ -109,37 +136,39 @@ bool AArch64::isX18ReservedByDefault(const Triple &TT) {
 }

 // Allows partial match, ex. "v8a" matches "armv8a".
-const AArch64::ArchInfo &AArch64::parseArch(StringRef Arch) {
+AArch64::ArchKind AArch64::parseArch(StringRef Arch) {
   Arch = llvm::ARM::getCanonicalArchName(Arch);
   if (checkArchVersion(Arch) < 8)
-    return AArch64::INVALID;
+    return ArchKind::INVALID;

   StringRef Syn = llvm::ARM::getArchSynonym(Arch);
-  for (const auto *A : ArchInfos) {
-    if (A->Name.endswith(Syn))
-      return *A;
+  for (const auto &A : AArch64ARCHNames) {
+    if (A.Name.endswith(Syn))
+      return A.ID;
   }
-  return AArch64::INVALID;
+  return ArchKind::INVALID;
 }

 AArch64::ArchExtKind AArch64::parseArchExt(StringRef ArchExt) {
-  for (const auto &A : Extensions) {
+  for (const auto &A : AArch64ARCHExtNames) {
     if (ArchExt == A.Name)
       return static_cast<ArchExtKind>(A.ID);
   }
   return AArch64::AEK_INVALID;
 }

-const AArch64::CpuInfo &AArch64::parseCpu(StringRef Name) {
+AArch64::ArchKind AArch64::parseCPUArch(StringRef CPU) {
   // Resolve aliases first.
-  Name = resolveCPUAlias(Name);
+  for (const auto &Alias : AArch64CPUAliases) {
+    if (CPU == Alias.Alias) {
+      CPU = Alias.Name;
+      break;
+    }
+  }

   // Then find the CPU name.
-  for (const auto &C : CpuInfos)
-    if (Name == C.Name)
-      return C;
-
-  // "generic" returns invalid.
-  assert(Name != "invalid" && "Unexpected recursion.");
-  return parseCpu("invalid");
+  for (const auto &C : AArch64CPUNames)
+    if (CPU == C.Name)
+      return C.ArchID;
+
+  return ArchKind::INVALID;
 }


@@ -6806,48 +6806,67 @@ bool AArch64AsmParser::ParseDirective(AsmToken DirectiveID) {
return false; return false;
} }
static void ExpandCryptoAEK(const AArch64::ArchInfo &ArchInfo, static void ExpandCryptoAEK(AArch64::ArchKind ArchKind,
SmallVector<StringRef, 4> &RequestedExtensions) { SmallVector<StringRef, 4> &RequestedExtensions) {
const bool NoCrypto = llvm::is_contained(RequestedExtensions, "nocrypto"); const bool NoCrypto = llvm::is_contained(RequestedExtensions, "nocrypto");
const bool Crypto = llvm::is_contained(RequestedExtensions, "crypto"); const bool Crypto = llvm::is_contained(RequestedExtensions, "crypto");
if (!NoCrypto && Crypto) { if (!NoCrypto && Crypto) {
// Map 'generic' (and others) to sha2 and aes, because switch (ArchKind) {
// that was the traditional meaning of crypto. default:
if (ArchInfo == AArch64::ARMV8_1A || ArchInfo == AArch64::ARMV8_2A || // Map 'generic' (and others) to sha2 and aes, because
ArchInfo == AArch64::ARMV8_3A) { // that was the traditional meaning of crypto.
case AArch64::ArchKind::ARMV8_1A:
case AArch64::ArchKind::ARMV8_2A:
case AArch64::ArchKind::ARMV8_3A:
RequestedExtensions.push_back("sha2"); RequestedExtensions.push_back("sha2");
RequestedExtensions.push_back("aes"); RequestedExtensions.push_back("aes");
} break;
if (ArchInfo == AArch64::ARMV8_4A || ArchInfo == AArch64::ARMV8_5A || case AArch64::ArchKind::ARMV8_4A:
ArchInfo == AArch64::ARMV8_6A || ArchInfo == AArch64::ARMV8_7A || case AArch64::ArchKind::ARMV8_5A:
ArchInfo == AArch64::ARMV8_8A || ArchInfo == AArch64::ARMV8_9A || case AArch64::ArchKind::ARMV8_6A:
ArchInfo == AArch64::ARMV9A || ArchInfo == AArch64::ARMV9_1A || case AArch64::ArchKind::ARMV8_7A:
ArchInfo == AArch64::ARMV9_2A || ArchInfo == AArch64::ARMV9_3A || case AArch64::ArchKind::ARMV8_8A:
ArchInfo == AArch64::ARMV9_4A || ArchInfo == AArch64::ARMV8R) { case AArch64::ArchKind::ARMV8_9A:
case AArch64::ArchKind::ARMV9A:
case AArch64::ArchKind::ARMV9_1A:
case AArch64::ArchKind::ARMV9_2A:
case AArch64::ArchKind::ARMV9_3A:
case AArch64::ArchKind::ARMV9_4A:
case AArch64::ArchKind::ARMV8R:
RequestedExtensions.push_back("sm4"); RequestedExtensions.push_back("sm4");
RequestedExtensions.push_back("sha3"); RequestedExtensions.push_back("sha3");
RequestedExtensions.push_back("sha2"); RequestedExtensions.push_back("sha2");
RequestedExtensions.push_back("aes"); RequestedExtensions.push_back("aes");
break;
} }
} else if (NoCrypto) { } else if (NoCrypto) {
// Map 'generic' (and others) to sha2 and aes, because switch (ArchKind) {
// that was the traditional meaning of crypto. default:
if (ArchInfo == AArch64::ARMV8_1A || ArchInfo == AArch64::ARMV8_2A || // Map 'generic' (and others) to sha2 and aes, because
ArchInfo == AArch64::ARMV8_3A) { // that was the traditional meaning of crypto.
case AArch64::ArchKind::ARMV8_1A:
case AArch64::ArchKind::ARMV8_2A:
case AArch64::ArchKind::ARMV8_3A:
RequestedExtensions.push_back("nosha2"); RequestedExtensions.push_back("nosha2");
RequestedExtensions.push_back("noaes"); RequestedExtensions.push_back("noaes");
} break;
if (ArchInfo == AArch64::ARMV8_4A || ArchInfo == AArch64::ARMV8_5A || case AArch64::ArchKind::ARMV8_4A:
ArchInfo == AArch64::ARMV8_6A || ArchInfo == AArch64::ARMV8_7A || case AArch64::ArchKind::ARMV8_5A:
ArchInfo == AArch64::ARMV8_8A || ArchInfo == AArch64::ARMV8_9A || case AArch64::ArchKind::ARMV8_6A:
ArchInfo == AArch64::ARMV9A || ArchInfo == AArch64::ARMV9_1A || case AArch64::ArchKind::ARMV8_7A:
ArchInfo == AArch64::ARMV9_2A || ArchInfo == AArch64::ARMV9_3A || case AArch64::ArchKind::ARMV8_8A:
ArchInfo == AArch64::ARMV9_4A) { case AArch64::ArchKind::ARMV8_9A:
case AArch64::ArchKind::ARMV9A:
case AArch64::ArchKind::ARMV9_1A:
case AArch64::ArchKind::ARMV9_2A:
case AArch64::ArchKind::ARMV9_3A:
case AArch64::ArchKind::ARMV9_4A:
RequestedExtensions.push_back("nosm4"); RequestedExtensions.push_back("nosm4");
RequestedExtensions.push_back("nosha3"); RequestedExtensions.push_back("nosha3");
RequestedExtensions.push_back("nosha2"); RequestedExtensions.push_back("nosha2");
RequestedExtensions.push_back("noaes"); RequestedExtensions.push_back("noaes");
break;
} }
} }
} }
@@ -6861,8 +6880,8 @@ bool AArch64AsmParser::parseDirectiveArch(SMLoc L) {
std::tie(Arch, ExtensionString) = std::tie(Arch, ExtensionString) =
getParser().parseStringToEndOfStatement().trim().split('+'); getParser().parseStringToEndOfStatement().trim().split('+');
const AArch64::ArchInfo &ArchInfo = AArch64::parseArch(Arch); AArch64::ArchKind ID = AArch64::parseArch(Arch);
if (ArchInfo == AArch64::INVALID) if (ID == AArch64::ArchKind::INVALID)
return Error(ArchLoc, "unknown arch name"); return Error(ArchLoc, "unknown arch name");
if (parseToken(AsmToken::EndOfStatement)) if (parseToken(AsmToken::EndOfStatement))
@@ -6870,9 +6889,9 @@ bool AArch64AsmParser::parseDirectiveArch(SMLoc L) {
// Get the architecture and extension features. // Get the architecture and extension features.
std::vector<StringRef> AArch64Features; std::vector<StringRef> AArch64Features;
AArch64Features.push_back(ArchInfo.ArchFeature); AArch64Features.push_back(AArch64::getArchFeature(ID));
AArch64::getExtensionFeatures( AArch64::getExtensionFeatures(AArch64::getDefaultExtensions("generic", ID),
AArch64::getDefaultExtensions("generic", ArchInfo), AArch64Features); AArch64Features);
MCSubtargetInfo &STI = copySTI(); MCSubtargetInfo &STI = copySTI();
std::vector<std::string> ArchFeatures(AArch64Features.begin(), AArch64Features.end()); std::vector<std::string> ArchFeatures(AArch64Features.begin(), AArch64Features.end());
@@ -6883,7 +6902,7 @@ bool AArch64AsmParser::parseDirectiveArch(SMLoc L) {
if (!ExtensionString.empty()) if (!ExtensionString.empty())
ExtensionString.split(RequestedExtensions, '+'); ExtensionString.split(RequestedExtensions, '+');
ExpandCryptoAEK(ArchInfo, RequestedExtensions); ExpandCryptoAEK(ID, RequestedExtensions);
FeatureBitset Features = STI.getFeatureBits(); FeatureBitset Features = STI.getFeatureBits();
for (auto Name : RequestedExtensions) { for (auto Name : RequestedExtensions) {
@@ -6979,7 +6998,7 @@ bool AArch64AsmParser::parseDirectiveCPU(SMLoc L) {
STI.setDefaultFeatures(CPU, /*TuneCPU*/ CPU, ""); STI.setDefaultFeatures(CPU, /*TuneCPU*/ CPU, "");
CurLoc = incrementLoc(CurLoc, CPU.size()); CurLoc = incrementLoc(CurLoc, CPU.size());
ExpandCryptoAEK(llvm::AArch64::getArchForCpu(CPU), RequestedExtensions); ExpandCryptoAEK(llvm::AArch64::getCPUArchKind(CPU), RequestedExtensions);
for (auto Name : RequestedExtensions) { for (auto Name : RequestedExtensions) {
// Advance source location past '+'. // Advance source location past '+'.


@@ -952,11 +952,11 @@ class AArch64CPUTestFixture
TEST_P(AArch64CPUTestFixture, testAArch64CPU) { TEST_P(AArch64CPUTestFixture, testAArch64CPU) {
ARMCPUTestParams params = GetParam(); ARMCPUTestParams params = GetParam();
const AArch64::ArchInfo &AI = AArch64::parseCpu(params.CPUName).Arch; AArch64::ArchKind AK = AArch64::parseCPUArch(params.CPUName);
EXPECT_EQ(params.ExpectedArch, AI.Name); EXPECT_EQ(params.ExpectedArch, AArch64::getArchName(AK));
uint64_t default_extensions = uint64_t default_extensions =
AArch64::getDefaultExtensions(params.CPUName, AI); AArch64::getDefaultExtensions(params.CPUName, AK);
EXPECT_PRED_FORMAT2(AssertSameExtensionFlags<ARM::ISAKind::AARCH64>, EXPECT_PRED_FORMAT2(AssertSameExtensionFlags<ARM::ISAKind::AARCH64>,
params.ExpectedFlags, default_extensions); params.ExpectedFlags, default_extensions);
} }
@@ -1402,14 +1402,14 @@ TEST(TargetParserTest, testAArch64CPUArchList) {
// valid, and match the expected 'magic' count. // valid, and match the expected 'magic' count.
EXPECT_EQ(List.size(), NumAArch64CPUArchs); EXPECT_EQ(List.size(), NumAArch64CPUArchs);
for(StringRef CPU : List) { for(StringRef CPU : List) {
EXPECT_NE(AArch64::parseCpu(CPU).Arch, AArch64::INVALID); EXPECT_NE(AArch64::parseCPUArch(CPU), AArch64::ArchKind::INVALID);
} }
} }
bool testAArch64Arch(StringRef Arch, StringRef DefaultCPU, StringRef SubArch, bool testAArch64Arch(StringRef Arch, StringRef DefaultCPU, StringRef SubArch,
unsigned ArchAttr) { unsigned ArchAttr) {
const AArch64::ArchInfo &AI = AArch64::parseArch(Arch); AArch64::ArchKind AK = AArch64::parseArch(Arch);
return AI != AArch64::INVALID; return AK != AArch64::ArchKind::INVALID;
} }
TEST(TargetParserTest, testAArch64Arch) { TEST(TargetParserTest, testAArch64Arch) {
@@ -1445,81 +1445,148 @@ TEST(TargetParserTest, testAArch64Arch) {
ARMBuildAttrs::CPUArch::v8_A)); ARMBuildAttrs::CPUArch::v8_A));
} }
bool testAArch64Extension(StringRef CPUName, const AArch64::ArchInfo &AI, bool testAArch64Extension(StringRef CPUName, AArch64::ArchKind AK,
StringRef ArchExt) { StringRef ArchExt) {
return AArch64::getDefaultExtensions(CPUName, AI) & return AArch64::getDefaultExtensions(CPUName, AK) &
AArch64::parseArchExt(ArchExt); AArch64::parseArchExt(ArchExt);
} }
 TEST(TargetParserTest, testAArch64Extension) {
-  EXPECT_FALSE(testAArch64Extension("cortex-a34", AArch64::INVALID, "ras"));
-  EXPECT_FALSE(testAArch64Extension("cortex-a35", AArch64::INVALID, "ras"));
-  EXPECT_FALSE(testAArch64Extension("cortex-a53", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("cortex-a55", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("cortex-a55", AArch64::INVALID, "fp16"));
-  EXPECT_FALSE(testAArch64Extension("cortex-a55", AArch64::INVALID, "fp16fml"));
-  EXPECT_TRUE(testAArch64Extension("cortex-a55", AArch64::INVALID, "dotprod"));
-  EXPECT_FALSE(testAArch64Extension("cortex-a57", AArch64::INVALID, "ras"));
-  EXPECT_FALSE(testAArch64Extension("cortex-a72", AArch64::INVALID, "ras"));
-  EXPECT_FALSE(testAArch64Extension("cortex-a73", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("cortex-a75", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("cortex-a75", AArch64::INVALID, "fp16"));
-  EXPECT_FALSE(testAArch64Extension("cortex-a75", AArch64::INVALID, "fp16fml"));
-  EXPECT_TRUE(testAArch64Extension("cortex-a75", AArch64::INVALID, "dotprod"));
-  EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "fp16"));
-  EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "fp16fml"));
-  EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "dotprod"));
-  EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "lse"));
-  EXPECT_FALSE(testAArch64Extension("cyclone", AArch64::INVALID, "ras"));
-  EXPECT_FALSE(testAArch64Extension("exynos-m3", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "dotprod"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "fp16"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "lse"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "rdm"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "dotprod"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "fp16"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "lse"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "rdm"));
-  EXPECT_TRUE(testAArch64Extension("falkor", AArch64::INVALID, "rdm"));
-  EXPECT_FALSE(testAArch64Extension("kryo", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "crc"));
-  EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "lse"));
-  EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "rdm"));
-  EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "rcpc"));
-  EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "profile"));
-  EXPECT_FALSE(testAArch64Extension("saphira", AArch64::INVALID, "fp16"));
-  EXPECT_FALSE(testAArch64Extension("thunderx2t99", AArch64::INVALID, "ras"));
-  EXPECT_FALSE(testAArch64Extension("thunderx", AArch64::INVALID, "lse"));
-  EXPECT_FALSE(testAArch64Extension("thunderxt81", AArch64::INVALID, "lse"));
-  EXPECT_FALSE(testAArch64Extension("thunderxt83", AArch64::INVALID, "lse"));
-  EXPECT_FALSE(testAArch64Extension("thunderxt88", AArch64::INVALID, "lse"));
-  EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "crypto"));
-  EXPECT_FALSE(testAArch64Extension("tsv110", AArch64::INVALID, "sha3"));
-  EXPECT_FALSE(testAArch64Extension("tsv110", AArch64::INVALID, "sm4"));
-  EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "ras"));
-  EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "profile"));
-  EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "fp16"));
-  EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "fp16fml"));
-  EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "dotprod"));
-  EXPECT_TRUE(testAArch64Extension("a64fx", AArch64::INVALID, "fp16"));
-  EXPECT_TRUE(testAArch64Extension("a64fx", AArch64::INVALID, "sve"));
-  EXPECT_FALSE(testAArch64Extension("a64fx", AArch64::INVALID, "sve2"));
-  EXPECT_TRUE(testAArch64Extension("carmel", AArch64::INVALID, "crypto"));
-  EXPECT_TRUE(testAArch64Extension("carmel", AArch64::INVALID, "fp16"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a34",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a35",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a53",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("cortex-a55",
+                                   AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("cortex-a55",
+                                   AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a55",
+                                    AArch64::ArchKind::INVALID, "fp16fml"));
+  EXPECT_TRUE(testAArch64Extension("cortex-a55",
+                                   AArch64::ArchKind::INVALID, "dotprod"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a57",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a72",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a73",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("cortex-a75",
+                                   AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("cortex-a75",
+                                   AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_FALSE(testAArch64Extension("cortex-a75",
+                                    AArch64::ArchKind::INVALID, "fp16fml"));
+  EXPECT_TRUE(testAArch64Extension("cortex-a75",
+                                   AArch64::ArchKind::INVALID, "dotprod"));
+  EXPECT_TRUE(testAArch64Extension("cortex-r82",
+                                   AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("cortex-r82",
+                                   AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_TRUE(testAArch64Extension("cortex-r82",
+                                   AArch64::ArchKind::INVALID, "fp16fml"));
+  EXPECT_TRUE(testAArch64Extension("cortex-r82",
+                                   AArch64::ArchKind::INVALID, "dotprod"));
+  EXPECT_TRUE(testAArch64Extension("cortex-r82",
+                                   AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_FALSE(testAArch64Extension("cyclone",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_FALSE(testAArch64Extension("exynos-m3",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m4",
+                                   AArch64::ArchKind::INVALID, "dotprod"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m4",
+                                   AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m4",
+                                   AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m4",
+                                   AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m4",
+                                   AArch64::ArchKind::INVALID, "rdm"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m5",
+                                   AArch64::ArchKind::INVALID, "dotprod"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m5",
+                                   AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m5",
+                                   AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m5",
+                                   AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("exynos-m5",
+                                   AArch64::ArchKind::INVALID, "rdm"));
+  EXPECT_TRUE(testAArch64Extension("falkor",
+                                   AArch64::ArchKind::INVALID, "rdm"));
+  EXPECT_FALSE(testAArch64Extension("kryo",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("saphira",
+                                   AArch64::ArchKind::INVALID, "crc"));
+  EXPECT_TRUE(testAArch64Extension("saphira",
+                                   AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_TRUE(testAArch64Extension("saphira",
+                                   AArch64::ArchKind::INVALID, "rdm"));
+  EXPECT_TRUE(testAArch64Extension("saphira",
+                                   AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("saphira",
+                                   AArch64::ArchKind::INVALID, "rcpc"));
+  EXPECT_TRUE(testAArch64Extension("saphira",
+                                   AArch64::ArchKind::INVALID, "profile"));
+  EXPECT_FALSE(testAArch64Extension("saphira",
+                                    AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_FALSE(testAArch64Extension("thunderx2t99",
+                                    AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_FALSE(testAArch64Extension("thunderx",
+                                    AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_FALSE(testAArch64Extension("thunderxt81",
+                                    AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_FALSE(testAArch64Extension("thunderxt83",
+                                    AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_FALSE(testAArch64Extension("thunderxt88",
+                                    AArch64::ArchKind::INVALID, "lse"));
+  EXPECT_TRUE(testAArch64Extension("tsv110",
+                                   AArch64::ArchKind::INVALID, "crypto"));
+  EXPECT_FALSE(testAArch64Extension("tsv110",
+                                    AArch64::ArchKind::INVALID, "sha3"));
+  EXPECT_FALSE(testAArch64Extension("tsv110",
+                                    AArch64::ArchKind::INVALID, "sm4"));
+  EXPECT_TRUE(testAArch64Extension("tsv110",
+                                   AArch64::ArchKind::INVALID, "ras"));
+  EXPECT_TRUE(testAArch64Extension("tsv110",
+                                   AArch64::ArchKind::INVALID, "profile"));
+  EXPECT_TRUE(testAArch64Extension("tsv110",
+                                   AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_TRUE(testAArch64Extension("tsv110",
+                                   AArch64::ArchKind::INVALID, "fp16fml"));
+  EXPECT_TRUE(testAArch64Extension("tsv110",
+                                   AArch64::ArchKind::INVALID, "dotprod"));
+  EXPECT_TRUE(testAArch64Extension("a64fx",
+                                   AArch64::ArchKind::INVALID, "fp16"));
+  EXPECT_TRUE(testAArch64Extension("a64fx",
+                                   AArch64::ArchKind::INVALID, "sve"));
+  EXPECT_FALSE(testAArch64Extension("a64fx",
+                                    AArch64::ArchKind::INVALID, "sve2"));
+  EXPECT_TRUE(
+      testAArch64Extension("carmel", AArch64::ArchKind::INVALID, "crypto"));
+  EXPECT_TRUE(
+      testAArch64Extension("carmel", AArch64::ArchKind::INVALID, "fp16"));
 
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8A, "ras"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_1A, "ras"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_2A, "profile"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_2A, "fp16"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_2A, "fp16fml"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_3A, "fp16"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_3A, "fp16fml"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_4A, "fp16"));
-  EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_4A, "fp16fml"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8A, "ras"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_1A, "ras"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_2A, "profile"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_2A, "fp16"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_2A, "fp16fml"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_3A, "fp16"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_3A, "fp16fml"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_4A, "fp16"));
+  EXPECT_FALSE(testAArch64Extension(
+      "generic", AArch64::ArchKind::ARMV8_4A, "fp16fml"));
 }
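The restored test spells out every (CPU, extension, expected) combination as an individual EXPECT line. A table-driven variant is one possible way to express the same checks more compactly; the sketch below assumes gtest and the testAArch64Extension helper from this file, the test name AArch64DefaultExtensionsTable is invented for illustration, and the patch itself keeps the explicit list.

struct DefaultExtCase {
  const char *CPU;
  const char *Ext;
  bool Expected;
};

TEST(TargetParserTest, AArch64DefaultExtensionsTable) {
  const DefaultExtCase Cases[] = {
      {"cortex-a55", "ras", true},      // expectations taken from the list above
      {"cortex-a55", "fp16fml", false},
      {"tsv110", "dotprod", true},
      {"a64fx", "sve2", false},
  };
  for (const auto &C : Cases)
    EXPECT_EQ(C.Expected,
              testAArch64Extension(C.CPU, AArch64::ArchKind::INVALID, C.Ext))
        << C.CPU << " / " << C.Ext;
}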
 TEST(TargetParserTest, AArch64ExtensionFeatures) {
@@ -1641,62 +1708,25 @@ TEST(TargetParserTest, AArch64ArchFeatures) {
   EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8R), "+v8r");
 }
-TEST(TargetParserTest, AArch64ArchPartialOrder) {
-  EXPECT_FALSE(AArch64::INVALID.implies(AArch64::INVALID));
-
-  for (const auto *A : AArch64::ArchInfos) {
-    EXPECT_EQ(*A, *A);
-    if (!(*A == *A)) {
-      EXPECT_NE(*A, *A);
-    }
-    // Comparison with invalid is always false
-    EXPECT_FALSE(A->implies(AArch64::INVALID));
-    EXPECT_FALSE(AArch64::INVALID.implies(*A));
-    // v8r has no relation to other valid architectures
-    if (*A != AArch64::ARMV8R) {
-      EXPECT_FALSE(A->implies(AArch64::ARMV8R));
-      EXPECT_FALSE(AArch64::ARMV8R.implies(*A));
-    }
-  }
-
-  for (const auto *A : {
-           &AArch64::ARMV8_1A,
-           &AArch64::ARMV8_2A,
-           &AArch64::ARMV8_3A,
-           &AArch64::ARMV8_4A,
-           &AArch64::ARMV8_5A,
-           &AArch64::ARMV8_6A,
-           &AArch64::ARMV8_7A,
-           &AArch64::ARMV8_8A,
-           &AArch64::ARMV8_9A,
-       })
-    EXPECT_TRUE(A->implies(AArch64::ARMV8A));
-
-  for (const auto *A : {&AArch64::ARMV9_1A, &AArch64::ARMV9_2A,
-                        &AArch64::ARMV9_3A, &AArch64::ARMV9_4A})
-    EXPECT_TRUE(A->implies(AArch64::ARMV9A));
-
-  EXPECT_TRUE(AArch64::ARMV8_1A.implies(AArch64::ARMV8A));
-  EXPECT_TRUE(AArch64::ARMV8_2A.implies(AArch64::ARMV8_1A));
-  EXPECT_TRUE(AArch64::ARMV8_3A.implies(AArch64::ARMV8_2A));
-  EXPECT_TRUE(AArch64::ARMV8_4A.implies(AArch64::ARMV8_3A));
-  EXPECT_TRUE(AArch64::ARMV8_5A.implies(AArch64::ARMV8_4A));
-  EXPECT_TRUE(AArch64::ARMV8_6A.implies(AArch64::ARMV8_5A));
-  EXPECT_TRUE(AArch64::ARMV8_7A.implies(AArch64::ARMV8_6A));
-  EXPECT_TRUE(AArch64::ARMV8_8A.implies(AArch64::ARMV8_7A));
-  EXPECT_TRUE(AArch64::ARMV8_9A.implies(AArch64::ARMV8_8A));
-
-  EXPECT_TRUE(AArch64::ARMV9_1A.implies(AArch64::ARMV9A));
-  EXPECT_TRUE(AArch64::ARMV9_2A.implies(AArch64::ARMV9_1A));
-  EXPECT_TRUE(AArch64::ARMV9_3A.implies(AArch64::ARMV9_2A));
-  EXPECT_TRUE(AArch64::ARMV9_4A.implies(AArch64::ARMV9_3A));
-
-  EXPECT_TRUE(AArch64::ARMV9A.implies(AArch64::ARMV8_5A));
-  EXPECT_TRUE(AArch64::ARMV9_1A.implies(AArch64::ARMV8_6A));
-  EXPECT_TRUE(AArch64::ARMV9_2A.implies(AArch64::ARMV8_7A));
-  EXPECT_TRUE(AArch64::ARMV9_3A.implies(AArch64::ARMV8_8A));
-  EXPECT_TRUE(AArch64::ARMV9_4A.implies(AArch64::ARMV8_9A));
-}
+TEST(TargetParserTest, AArch64ArchV9toV8Conversion) {
+  for (auto AK : AArch64::ArchKinds) {
+    if (AK == AArch64::ArchKind::INVALID)
+      EXPECT_EQ(AK, AArch64::convertV9toV8(AK));
+    else if (AK < AArch64::ArchKind::ARMV9A)
+      EXPECT_EQ(AK, AArch64::convertV9toV8(AK));
+    else if (AK >= AArch64::ArchKind::ARMV8R)
+      EXPECT_EQ(AArch64::ArchKind::INVALID, AArch64::convertV9toV8(AK));
+    else
+      EXPECT_TRUE(AArch64::convertV9toV8(AK) < AArch64::ArchKind::ARMV9A);
+  }
+  EXPECT_EQ(AArch64::ArchKind::ARMV8_5A,
+            AArch64::convertV9toV8(AArch64::ArchKind::ARMV9A));
+  EXPECT_EQ(AArch64::ArchKind::ARMV8_6A,
+            AArch64::convertV9toV8(AArch64::ArchKind::ARMV9_1A));
+  EXPECT_EQ(AArch64::ArchKind::ARMV8_7A,
+            AArch64::convertV9toV8(AArch64::ArchKind::ARMV9_2A));
+  EXPECT_EQ(AArch64::ArchKind::ARMV8_8A,
+            AArch64::convertV9toV8(AArch64::ArchKind::ARMV9_3A));
+}
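The restored AArch64ArchV9toV8Conversion test pins the mapping ARMV9A to ARMV8_5A, ARMV9_1A to ARMV8_6A, ARMV9_2A to ARMV8_7A and ARMV9_3A to ARMV8_8A, leaves pre-v9 kinds unchanged, and sends ARMV8R (and anything ordered at or after it) to INVALID. A self-contained sketch of that mapping using a stand-in enum, not the actual llvm::AArch64::convertV9toV8 implementation:

#include <cassert>

// Stand-in for AArch64::ArchKind, ordered the way the test assumes
// (v8.x kinds first, then v9.x kinds, then ARMV8R and INVALID).
enum class Arch {
  ARMV8_5A, ARMV8_6A, ARMV8_7A, ARMV8_8A,
  ARMV9A, ARMV9_1A, ARMV9_2A, ARMV9_3A,
  ARMV8R, INVALID
};

Arch convertV9toV8(Arch A) {
  switch (A) {
  case Arch::ARMV9A:   return Arch::ARMV8_5A; // v9.0 is based on v8.5
  case Arch::ARMV9_1A: return Arch::ARMV8_6A;
  case Arch::ARMV9_2A: return Arch::ARMV8_7A;
  case Arch::ARMV9_3A: return Arch::ARMV8_8A;
  case Arch::ARMV8R:   return Arch::INVALID;  // no v9 counterpart
  default:             return A;              // pre-v9 kinds map to themselves
  }
}

int main() {
  assert(convertV9toV8(Arch::ARMV9_1A) == Arch::ARMV8_6A);
  assert(convertV9toV8(Arch::ARMV8_5A) == Arch::ARMV8_5A);
  assert(convertV9toV8(Arch::ARMV8R) == Arch::INVALID);
  return 0;
}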
 TEST(TargetParserTest, AArch64ArchExtFeature) {