[AArch64] Improve TargetParser API

Re-land with constexpr StringRef::substr():

The TargetParser depends heavily on a collection of macros and enums to tie
together information about architectures, CPUs and extensions. Over time this
has led to some pretty awkward API choices. For example, recently a custom
operator-- has been added to the enum, which effectively turns iteration into
a graph traversal and makes the ordering of the macro calls in the header
significant. More generally there is a lot of string <-> enum conversion
going on. I think this shows the extent to which the current data structures
are constraining us, and the need for a rethink.

Key changes:

 - Get rid of Arch enum, which is used to bind fields together. Instead of
   passing around ArchKind, use the named ArchInfo objects directly or via
   references.

 - The list of all known ArchInfo becomes an array of pointers.

 - ArchKind::operator-- is replaced with ArchInfo::implies(), which defines
   which architectures are predecessors to each other. This allows features
   from predecessor architectures to be added in a more intuitive way.

 - Free functions of the form f(ArchKind) are converted to ArchInfo::f(). Some
   functions become unnecessary and are deleted.

 - Version number and profile are added to the ArchInfo. This makes comparison
   of architectures easier and moves a couple of functions out of clang and
   into AArch64TargetParser.

 - clang::AArch64TargetInfo ArchInfo is initialised to Armv8a not INVALID.

 - Add AArch64::ArchProfile, which is distinct from ARM::ArchProfile

 - Give things sensible names and add some comments.

Differential Revision: https://reviews.llvm.org/D138792
This commit is contained in:
Tomas Matheson 2022-11-18 11:20:05 +00:00
parent 54ebf1c4a1
commit e83f1502f1
10 changed files with 446 additions and 524 deletions

View File

@ -45,28 +45,6 @@ const Builtin::Info AArch64TargetInfo::BuiltinInfo[] = {
#include "clang/Basic/BuiltinsAArch64.def"
};
static StringRef getArchVersionString(llvm::AArch64::ArchKind Kind) {
switch (Kind) {
case llvm::AArch64::ArchKind::ARMV9A:
case llvm::AArch64::ArchKind::ARMV9_1A:
case llvm::AArch64::ArchKind::ARMV9_2A:
case llvm::AArch64::ArchKind::ARMV9_3A:
case llvm::AArch64::ArchKind::ARMV9_4A:
return "9";
default:
return "8";
}
}
StringRef AArch64TargetInfo::getArchProfile() const {
switch (ArchKind) {
case llvm::AArch64::ArchKind::ARMV8R:
return "R";
default:
return "A";
}
}
AArch64TargetInfo::AArch64TargetInfo(const llvm::Triple &Triple,
const TargetOptions &Opts)
: TargetInfo(Triple), ABI("aapcs") {
@ -170,7 +148,7 @@ bool AArch64TargetInfo::validateBranchProtection(StringRef Spec, StringRef,
bool AArch64TargetInfo::isValidCPUName(StringRef Name) const {
return Name == "generic" ||
llvm::AArch64::parseCPUArch(Name) != llvm::AArch64::ArchKind::INVALID;
llvm::AArch64::parseCpu(Name).Arch != llvm::AArch64::INVALID;
}
bool AArch64TargetInfo::setCPU(const std::string &Name) {
@ -298,8 +276,10 @@ void AArch64TargetInfo::getTargetDefines(const LangOptions &Opts,
// ACLE predefines. Many can only have one possible value on v8 AArch64.
Builder.defineMacro("__ARM_ACLE", "200");
Builder.defineMacro("__ARM_ARCH", getArchVersionString(ArchKind));
Builder.defineMacro("__ARM_ARCH_PROFILE", "'" + getArchProfile() + "'");
Builder.defineMacro("__ARM_ARCH",
std::to_string(ArchInfo->Version.getMajor()));
Builder.defineMacro("__ARM_ARCH_PROFILE",
std::string("'") + (char)ArchInfo->Profile + "'");
Builder.defineMacro("__ARM_64BIT_STATE", "1");
Builder.defineMacro("__ARM_PCS_AAPCS64", "1");
@ -464,52 +444,34 @@ void AArch64TargetInfo::getTargetDefines(const LangOptions &Opts,
if (HasD128)
Builder.defineMacro("__ARM_FEATURE_SYSREG128", "1");
switch (ArchKind) {
default:
break;
case llvm::AArch64::ArchKind::ARMV8_1A:
if (*ArchInfo == llvm::AArch64::ARMV8_1A)
getTargetDefinesARMV81A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_2A:
if (*ArchInfo == llvm::AArch64::ARMV8_2A)
getTargetDefinesARMV82A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_3A:
if (*ArchInfo == llvm::AArch64::ARMV8_3A)
getTargetDefinesARMV83A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_4A:
if (*ArchInfo == llvm::AArch64::ARMV8_4A)
getTargetDefinesARMV84A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_5A:
if (*ArchInfo == llvm::AArch64::ARMV8_5A)
getTargetDefinesARMV85A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_6A:
if (*ArchInfo == llvm::AArch64::ARMV8_6A)
getTargetDefinesARMV86A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_7A:
if (*ArchInfo == llvm::AArch64::ARMV8_7A)
getTargetDefinesARMV87A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_8A:
if (*ArchInfo == llvm::AArch64::ARMV8_8A)
getTargetDefinesARMV88A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV8_9A:
if (*ArchInfo == llvm::AArch64::ARMV8_9A)
getTargetDefinesARMV89A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV9A:
if (*ArchInfo == llvm::AArch64::ARMV9A)
getTargetDefinesARMV9A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV9_1A:
if (*ArchInfo == llvm::AArch64::ARMV9_1A)
getTargetDefinesARMV91A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV9_2A:
if (*ArchInfo == llvm::AArch64::ARMV9_2A)
getTargetDefinesARMV92A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV9_3A:
if (*ArchInfo == llvm::AArch64::ARMV9_3A)
getTargetDefinesARMV93A(Opts, Builder);
break;
case llvm::AArch64::ArchKind::ARMV9_4A:
if (*ArchInfo == llvm::AArch64::ARMV9_4A)
getTargetDefinesARMV94A(Opts, Builder);
break;
}
// All of the __sync_(bool|val)_compare_and_swap_(1|2|4|8) builtins work.
Builder.defineMacro("__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1");
@ -559,17 +521,17 @@ bool AArch64TargetInfo::hasFeature(StringRef Feature) const {
void AArch64TargetInfo::setFeatureEnabled(llvm::StringMap<bool> &Features,
StringRef Name, bool Enabled) const {
Features[Name] = Enabled;
llvm::AArch64::ArchKind AK = llvm::AArch64::getSubArchArchKind(Name);
// Add all previous architecture versions.
// In case of v9.x the v8.x counterparts are added too.
if ("9" == getArchVersionString(AK))
for (llvm::AArch64::ArchKind I = llvm::AArch64::convertV9toV8(AK);
I != llvm::AArch64::ArchKind::INVALID; --I)
Features[llvm::AArch64::getSubArch(I)] = Enabled;
// If this "feature" is an architecture, also add features for all previous
// architecture versions. In case of v9.x the v8.x counterparts are added too.
const llvm::AArch64::ArchInfo &ArchInfo =
llvm::AArch64::ArchInfo::findBySubArch(Name);
for (llvm::AArch64::ArchKind I = --AK; I != llvm::AArch64::ArchKind::INVALID;
--I)
Features[llvm::AArch64::getSubArch(I)] = Enabled;
if (ArchInfo == llvm::AArch64::INVALID)
return; // Not an architecture, nothing more to do.
for (const auto *OtherArch : llvm::AArch64::ArchInfos)
if (ArchInfo.implies(*OtherArch))
Features[OtherArch->getSubArch()] = Enabled;
}
bool AArch64TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
@ -602,8 +564,6 @@ bool AArch64TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
HasD128 = false;
HasRCPC = false;
ArchKind = llvm::AArch64::ArchKind::INVALID;
for (const auto &Feature : Features) {
if (Feature == "+neon")
FPU |= NeonMode;
@ -665,38 +625,51 @@ bool AArch64TargetInfo::handleTargetFeatures(std::vector<std::string> &Features,
if (Feature == "+strict-align")
HasUnaligned = false;
// All predecessor archs are added but select the latest one for ArchKind.
if (Feature == "+v8a" && ArchKind < llvm::AArch64::ArchKind::ARMV8A)
ArchKind = llvm::AArch64::ArchKind::ARMV8A;
if (Feature == "+v8.1a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_1A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_1A;
if (Feature == "+v8.2a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_2A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_2A;
if (Feature == "+v8.3a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_3A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_3A;
if (Feature == "+v8.4a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_4A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_4A;
if (Feature == "+v8.5a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_5A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_5A;
if (Feature == "+v8.6a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_6A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_6A;
if (Feature == "+v8.7a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_7A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_7A;
if (Feature == "+v8.8a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_8A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_8A;
if (Feature == "+v8.9a" && ArchKind < llvm::AArch64::ArchKind::ARMV8_9A)
ArchKind = llvm::AArch64::ArchKind::ARMV8_9A;
if (Feature == "+v9a" && ArchKind < llvm::AArch64::ArchKind::ARMV9A)
ArchKind = llvm::AArch64::ArchKind::ARMV9A;
if (Feature == "+v9.1a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_1A)
ArchKind = llvm::AArch64::ArchKind::ARMV9_1A;
if (Feature == "+v9.2a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_2A)
ArchKind = llvm::AArch64::ArchKind::ARMV9_2A;
if (Feature == "+v9.3a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_3A)
ArchKind = llvm::AArch64::ArchKind::ARMV9_3A;
if (Feature == "+v9.4a" && ArchKind < llvm::AArch64::ArchKind::ARMV9_4A)
ArchKind = llvm::AArch64::ArchKind::ARMV9_4A;
if (Feature == "+v8a" && ArchInfo->Version < llvm::AArch64::ARMV8A.Version)
ArchInfo = &llvm::AArch64::ARMV8A;
if (Feature == "+v8.1a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_1A.Version)
ArchInfo = &llvm::AArch64::ARMV8_1A;
if (Feature == "+v8.2a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_2A.Version)
ArchInfo = &llvm::AArch64::ARMV8_2A;
if (Feature == "+v8.3a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_3A.Version)
ArchInfo = &llvm::AArch64::ARMV8_3A;
if (Feature == "+v8.4a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_4A.Version)
ArchInfo = &llvm::AArch64::ARMV8_4A;
if (Feature == "+v8.5a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_5A.Version)
ArchInfo = &llvm::AArch64::ARMV8_5A;
if (Feature == "+v8.6a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_6A.Version)
ArchInfo = &llvm::AArch64::ARMV8_6A;
if (Feature == "+v8.7a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_7A.Version)
ArchInfo = &llvm::AArch64::ARMV8_7A;
if (Feature == "+v8.8a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_8A.Version)
ArchInfo = &llvm::AArch64::ARMV8_8A;
if (Feature == "+v8.9a" &&
ArchInfo->Version < llvm::AArch64::ARMV8_9A.Version)
ArchInfo = &llvm::AArch64::ARMV8_9A;
if (Feature == "+v9a" && ArchInfo->Version < llvm::AArch64::ARMV9A.Version)
ArchInfo = &llvm::AArch64::ARMV9A;
if (Feature == "+v9.1a" &&
ArchInfo->Version < llvm::AArch64::ARMV9_1A.Version)
ArchInfo = &llvm::AArch64::ARMV9_1A;
if (Feature == "+v9.2a" &&
ArchInfo->Version < llvm::AArch64::ARMV9_2A.Version)
ArchInfo = &llvm::AArch64::ARMV9_2A;
if (Feature == "+v9.3a" &&
ArchInfo->Version < llvm::AArch64::ARMV9_3A.Version)
ArchInfo = &llvm::AArch64::ARMV9_3A;
if (Feature == "+v9.4a" &&
ArchInfo->Version < llvm::AArch64::ARMV9_4A.Version)
ArchInfo = &llvm::AArch64::ARMV9_4A;
if (Feature == "+v8r")
ArchKind = llvm::AArch64::ArchKind::ARMV8R;
ArchInfo = &llvm::AArch64::ARMV8R;
if (Feature == "+fullfp16")
HasFullFP16 = true;
if (Feature == "+dotprod")
@ -744,8 +717,8 @@ bool AArch64TargetInfo::initFeatureMap(
llvm::StringMap<bool> &Features, DiagnosticsEngine &Diags, StringRef CPU,
const std::vector<std::string> &FeaturesVec) const {
// Parse the CPU and add any implied features.
llvm::AArch64::ArchKind Arch = llvm::AArch64::parseCPUArch(CPU);
if (Arch != llvm::AArch64::ArchKind::INVALID) {
const llvm::AArch64::ArchInfo &Arch = llvm::AArch64::parseCpu(CPU).Arch;
if (Arch != llvm::AArch64::INVALID) {
uint64_t Exts = llvm::AArch64::getDefaultExtensions(CPU, Arch);
std::vector<StringRef> CPUFeats;
llvm::AArch64::getExtensionFeatures(Exts, CPUFeats);
@ -806,13 +779,13 @@ ParsedTargetAttr AArch64TargetInfo::parseTargetAttr(StringRef Features) const {
FoundArch = true;
std::pair<StringRef, StringRef> Split =
Feature.split("=").second.trim().split("+");
llvm::AArch64::ArchKind ArchKind = llvm::AArch64::parseArch(Split.first);
const llvm::AArch64::ArchInfo &AI = llvm::AArch64::parseArch(Split.first);
// Parse the architecture version, adding the required features to
// Ret.Features.
if (ArchKind == llvm::AArch64::ArchKind::INVALID)
if (AI == llvm::AArch64::INVALID)
continue;
Ret.Features.push_back(llvm::AArch64::getArchFeature(ArchKind).str());
Ret.Features.push_back(AI.ArchFeature.str());
// Add any extra features, after the +
SplitAndAddFeatures(Split.second, Ret.Features);
} else if (Feature.startswith("cpu=")) {

View File

@ -56,12 +56,11 @@ class LLVM_LIBRARY_VISIBILITY AArch64TargetInfo : public TargetInfo {
bool HasD128;
bool HasRCPC;
llvm::AArch64::ArchKind ArchKind;
const llvm::AArch64::ArchInfo *ArchInfo = &llvm::AArch64::ARMV8A;
static const Builtin::Info BuiltinInfo[];
std::string ABI;
StringRef getArchProfile() const;
public:
AArch64TargetInfo(const llvm::Triple &Triple, const TargetOptions &Opts);

View File

@ -70,7 +70,7 @@ std::string aarch64::getAArch64TargetCPU(const ArgList &Args,
// Decode AArch64 features from string like +[no]featureA+[no]featureB+...
static bool DecodeAArch64Features(const Driver &D, StringRef text,
std::vector<StringRef> &Features,
llvm::AArch64::ArchKind ArchKind) {
const llvm::AArch64::ArchInfo &ArchInfo) {
SmallVector<StringRef, 8> Split;
text.split(Split, StringRef("+"), -1, false);
@ -104,14 +104,14 @@ static bool DecodeAArch64Features(const Driver &D, StringRef text,
// +sve implies +f32mm if the base architecture is >= v8.6A (except v9A)
// It isn't the case in general that sve implies both f64mm and f32mm
if ((ArchKind == llvm::AArch64::ArchKind::ARMV8_6A ||
ArchKind == llvm::AArch64::ArchKind::ARMV8_7A ||
ArchKind == llvm::AArch64::ArchKind::ARMV8_8A ||
ArchKind == llvm::AArch64::ArchKind::ARMV8_9A ||
ArchKind == llvm::AArch64::ArchKind::ARMV9_1A ||
ArchKind == llvm::AArch64::ArchKind::ARMV9_2A ||
ArchKind == llvm::AArch64::ArchKind::ARMV9_3A ||
ArchKind == llvm::AArch64::ArchKind::ARMV9_4A) &&
if ((ArchInfo == llvm::AArch64::ARMV8_6A ||
ArchInfo == llvm::AArch64::ARMV8_7A ||
ArchInfo == llvm::AArch64::ARMV8_8A ||
ArchInfo == llvm::AArch64::ARMV8_9A ||
ArchInfo == llvm::AArch64::ARMV9_1A ||
ArchInfo == llvm::AArch64::ARMV9_2A ||
ArchInfo == llvm::AArch64::ARMV9_3A ||
ArchInfo == llvm::AArch64::ARMV9_4A) &&
Feature == "sve")
Features.push_back("+f32mm");
}
@ -123,10 +123,8 @@ static bool DecodeAArch64Features(const Driver &D, StringRef text,
static bool DecodeAArch64Mcpu(const Driver &D, StringRef Mcpu, StringRef &CPU,
std::vector<StringRef> &Features) {
std::pair<StringRef, StringRef> Split = Mcpu.split("+");
CPU = Split.first;
llvm::AArch64::ArchKind ArchKind = llvm::AArch64::ArchKind::ARMV8A;
CPU = llvm::AArch64::resolveCPUAlias(CPU);
const llvm::AArch64::ArchInfo *ArchInfo = &llvm::AArch64::ARMV8A;
CPU = llvm::AArch64::resolveCPUAlias(Split.first);
if (CPU == "native")
CPU = llvm::sys::getHostCPUName();
@ -134,21 +132,21 @@ static bool DecodeAArch64Mcpu(const Driver &D, StringRef Mcpu, StringRef &CPU,
if (CPU == "generic") {
Features.push_back("+neon");
} else {
ArchKind = llvm::AArch64::parseCPUArch(CPU);
if (ArchKind == llvm::AArch64::ArchKind::INVALID)
ArchInfo = &llvm::AArch64::parseCpu(CPU).Arch;
if (*ArchInfo == llvm::AArch64::INVALID)
return false;
Features.push_back(llvm::AArch64::getArchFeature(ArchKind));
Features.push_back(ArchInfo->ArchFeature);
uint64_t Extension = llvm::AArch64::getDefaultExtensions(CPU, ArchKind);
uint64_t Extension = llvm::AArch64::getDefaultExtensions(CPU, *ArchInfo);
if (!llvm::AArch64::getExtensionFeatures(Extension, Features))
return false;
}
}
if (Split.second.size() &&
!DecodeAArch64Features(D, Split.second, Features, ArchKind))
return false;
if (Split.second.size() &&
!DecodeAArch64Features(D, Split.second, Features, *ArchInfo))
return false;
return true;
return true;
}
static bool
@ -158,25 +156,26 @@ getAArch64ArchFeaturesFromMarch(const Driver &D, StringRef March,
std::string MarchLowerCase = March.lower();
std::pair<StringRef, StringRef> Split = StringRef(MarchLowerCase).split("+");
llvm::AArch64::ArchKind ArchKind = llvm::AArch64::parseArch(Split.first);
const llvm::AArch64::ArchInfo *ArchInfo =
&llvm::AArch64::parseArch(Split.first);
if (Split.first == "native")
ArchKind = llvm::AArch64::getCPUArchKind(llvm::sys::getHostCPUName().str());
if (ArchKind == llvm::AArch64::ArchKind::INVALID)
ArchInfo = &llvm::AArch64::getArchForCpu(llvm::sys::getHostCPUName().str());
if (*ArchInfo == llvm::AArch64::INVALID)
return false;
Features.push_back(llvm::AArch64::getArchFeature(ArchKind));
Features.push_back(ArchInfo->ArchFeature);
// Enable SVE2 by default on Armv9-A.
// It can still be disabled if +nosve2 is present.
// We must do this early so that DecodeAArch64Features has the correct state
if ((ArchKind == llvm::AArch64::ArchKind::ARMV9A ||
ArchKind == llvm::AArch64::ArchKind::ARMV9_1A ||
ArchKind == llvm::AArch64::ArchKind::ARMV9_2A)) {
if ((*ArchInfo == llvm::AArch64::ARMV9A ||
*ArchInfo == llvm::AArch64::ARMV9_1A ||
*ArchInfo == llvm::AArch64::ARMV9_2A)) {
Features.push_back("+sve");
Features.push_back("+sve2");
}
if ((Split.second.size() &&
!DecodeAArch64Features(D, Split.second, Features, ArchKind)))
!DecodeAArch64Features(D, Split.second, Features, *ArchInfo)))
return false;
return true;

View File

@ -561,7 +561,8 @@ namespace llvm {
/// \param N The number of characters to included in the substring. If N
/// exceeds the number of characters remaining in the string, the string
/// suffix (starting with \p Start) will be returned.
[[nodiscard]] StringRef substr(size_t Start, size_t N = npos) const {
[[nodiscard]] constexpr StringRef substr(size_t Start,
size_t N = npos) const {
Start = std::min(Start, Length);
return StringRef(Data + Start, std::min(N, Length - Start));
}

View File

@ -13,85 +13,85 @@
// NOTE: NO INCLUDE GUARD DESIRED!
#ifndef AARCH64_ARCH
#define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT)
#define AARCH64_ARCH(MAJOR, MINOR, PROFILE, NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT)
#endif
// NOTE: The order and the grouping of the elements matter to make ArchKind iterable.
// List is organised as armv8a -> armv8n-a, armv9a -> armv9m-a and armv8-r.
AARCH64_ARCH("invalid", INVALID, "+",
AARCH64_ARCH(0, 0, InvalidProfile, "invalid", INVALID, "+",
AArch64::AEK_NONE)
AARCH64_ARCH("armv8-a", ARMV8A, "+v8a",
AARCH64_ARCH(8, 0, AProfile, "armv8-a", ARMV8A, "+v8a",
(AArch64::AEK_CRYPTO | AArch64::AEK_FP | AArch64::AEK_SIMD))
AARCH64_ARCH("armv8.1-a", ARMV8_1A, "+v8.1a",
AARCH64_ARCH(8, 1, AProfile, "armv8.1-a", ARMV8_1A, "+v8.1a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_LSE | AArch64::AEK_RDM))
AARCH64_ARCH("armv8.2-a", ARMV8_2A, "+v8.2a",
AARCH64_ARCH(8, 2, AProfile, "armv8.2-a", ARMV8_2A, "+v8.2a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM))
AARCH64_ARCH("armv8.3-a", ARMV8_3A, "+v8.3a",
AARCH64_ARCH(8, 3, AProfile, "armv8.3-a", ARMV8_3A, "+v8.3a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC))
AARCH64_ARCH("armv8.4-a", ARMV8_4A, "+v8.4a",
AARCH64_ARCH(8, 4, AProfile, "armv8.4-a", ARMV8_4A, "+v8.4a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD))
AARCH64_ARCH("armv8.5-a", ARMV8_5A, "+v8.5a",
AARCH64_ARCH(8, 5, AProfile, "armv8.5-a", ARMV8_5A, "+v8.5a",
(AArch64::AEK_CRC | AArch64::AEK_CRYPTO | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD))
AARCH64_ARCH("armv8.6-a", ARMV8_6A, "+v8.6a",
AARCH64_ARCH(8, 6, AProfile, "armv8.6-a", ARMV8_6A, "+v8.6a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH("armv8.7-a", ARMV8_7A, "+v8.7a",
AARCH64_ARCH(8, 7, AProfile, "armv8.7-a", ARMV8_7A, "+v8.7a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH("armv8.8-a", ARMV8_8A, "+v8.8a",
AARCH64_ARCH(8, 8, AProfile, "armv8.8-a", ARMV8_8A, "+v8.8a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH("armv8.9-a", ARMV8_9A, "+v8.9a",
AARCH64_ARCH(8, 9, AProfile, "armv8.9-a", ARMV8_9A, "+v8.9a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SM4 | AArch64::AEK_SHA3 | AArch64::AEK_BF16 |
AArch64::AEK_SHA2 | AArch64::AEK_AES | AArch64::AEK_I8MM))
AARCH64_ARCH("armv9-a", ARMV9A, "+v9a",
AARCH64_ARCH(9, 0, AProfile, "armv9-a", ARMV9A, "+v9a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_SVE2))
AARCH64_ARCH("armv9.1-a", ARMV9_1A, "+v9.1a",
AARCH64_ARCH(9, 1, AProfile, "armv9.1-a", ARMV9_1A, "+v9.1a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
AARCH64_ARCH("armv9.2-a", ARMV9_2A, "+v9.2a",
AARCH64_ARCH(9, 2, AProfile, "armv9.2-a", ARMV9_2A, "+v9.2a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
AARCH64_ARCH("armv9.3-a", ARMV9_3A, "+v9.3a",
AARCH64_ARCH(9, 3, AProfile, "armv9.3-a", ARMV9_3A, "+v9.3a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
AARCH64_ARCH("armv9.4-a", ARMV9_4A, "+v9.4a",
AARCH64_ARCH(9, 4, AProfile, "armv9.4-a", ARMV9_4A, "+v9.4a",
(AArch64::AEK_CRC | AArch64::AEK_FP |
AArch64::AEK_SIMD | AArch64::AEK_RAS | AArch64::AEK_LSE |
AArch64::AEK_RDM | AArch64::AEK_RCPC | AArch64::AEK_DOTPROD |
AArch64::AEK_BF16 | AArch64::AEK_I8MM | AArch64::AEK_SVE2))
// For v8-R, we do not enable crypto and align with GCC that enables a more
// minimal set of optional architecture extensions.
AARCH64_ARCH("armv8-r", ARMV8R, "+v8r",
AARCH64_ARCH(8, 0, RProfile, "armv8-r", ARMV8R, "+v8r",
(AArch64::AEK_CRC | AArch64::AEK_RDM | AArch64::AEK_SSBS |
AArch64::AEK_DOTPROD | AArch64::AEK_FP | AArch64::AEK_SIMD |
AArch64::AEK_FP16 | AArch64::AEK_FP16FML | AArch64::AEK_RAS |
@ -101,7 +101,6 @@ AARCH64_ARCH("armv8-r", ARMV8R, "+v8r",
#ifndef AARCH64_ARCH_EXT_NAME
#define AARCH64_ARCH_EXT_NAME(NAME, ID, FEATURE, NEGFEATURE)
#endif
// FIXME: This would be nicer were it tablegen
AARCH64_ARCH_EXT_NAME("invalid", AArch64::AEK_INVALID, {}, {})
AARCH64_ARCH_EXT_NAME("none", AArch64::AEK_NONE, {}, {})
AARCH64_ARCH_EXT_NAME("crc", AArch64::AEK_CRC, "+crc", "-crc")

View File

@ -15,9 +15,9 @@
#define LLVM_SUPPORT_AARCH64TARGETPARSER_H
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/VersionTuple.h"
#include <vector>
// FIXME: This should be made into a class design, to avoid duplication.
namespace llvm {
class Triple;
@ -83,101 +83,129 @@ enum ArchExtKind : uint64_t {
AEK_LSE128 = 1ULL << 52, // FEAT_LSE128
};
enum class ArchKind {
#define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT) ID,
#include "AArch64TargetParser.def"
// Represents an extension that can be enabled with -march=<arch>+<extension>.
// Typically these correspond to Arm Architecture extensions, unlike
// SubtargetFeature which may represent either an actual extension or some
// internal LLVM property.
struct ExtensionInfo {
StringRef Name; // Human readable name, e.g. "profile".
ArchExtKind ID; // Corresponding to the ArchExtKind, this extensions
// representation in the bitfield.
StringRef Feature; // -mattr enable string, e.g. "+spe"
StringRef NegFeature; // -mattr disable string, e.g. "-spe"
};
struct ArchNames {
StringRef Name;
StringRef ArchFeature;
uint64_t ArchBaseExtensions;
ArchKind ID;
// Return ArchFeature without the leading "+".
StringRef getSubArch() const { return ArchFeature.substr(1); }
};
const ArchNames AArch64ARCHNames[] = {
#define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT) \
{NAME, ARCH_FEATURE, ARCH_BASE_EXT, AArch64::ArchKind::ID},
#include "AArch64TargetParser.def"
};
// List of Arch Extension names.
struct ExtName {
StringRef Name;
uint64_t ID;
StringRef Feature;
StringRef NegFeature;
};
const ExtName AArch64ARCHExtNames[] = {
inline constexpr ExtensionInfo Extensions[] = {
#define AARCH64_ARCH_EXT_NAME(NAME, ID, FEATURE, NEGFEATURE) \
{NAME, ID, FEATURE, NEGFEATURE},
#include "AArch64TargetParser.def"
};
// List of CPU names and their arches.
// The same CPU can have multiple arches and can be default on multiple arches.
// When finding the Arch for a CPU, first-found prevails. Sort them accordingly.
// When this becomes table-generated, we'd probably need two tables.
struct CpuNames {
StringRef Name;
ArchKind ArchID;
uint64_t DefaultExtensions;
enum ArchProfile { AProfile = 'A', RProfile = 'R', InvalidProfile = '?' };
// Information about a specific architecture, e.g. V8.1-A
struct ArchInfo {
VersionTuple Version; // Architecture version, major + minor.
ArchProfile Profile; // Architecture profile
StringRef Name; // Human readable name, e.g. "armv8.1-a"
StringRef ArchFeature; // Command line feature flag, e.g. +v8a
uint64_t DefaultExts; // bitfield of default extensions ArchExtKind
// These are not intended to be copied or created outside of this file.
ArchInfo(const ArchInfo &) = delete;
ArchInfo(const ArchInfo &&) = delete;
ArchInfo &operator=(const ArchInfo &rhs) = delete;
ArchInfo &&operator=(const ArchInfo &&rhs) = delete;
// Comparison is done by address. Copies should not exist.
bool operator==(const ArchInfo &Other) const { return this == &Other; }
bool operator!=(const ArchInfo &Other) const { return this != &Other; }
// Defines the following partial order, indicating when an architecture is
// a superset of another:
//
// v9.4a > v9.3a > v9.2a > v9.1a > v9a;
// v v v v v
// v8.9a > v8.8a > v8.7a > v8.6a > v8.5a > v8.4a > ... > v8a;
//
// v8r and INVALID have no relation to anything. This is used to
// determine which features to enable for a given architecture. See
// AArch64TargetInfo::setFeatureEnabled.
bool implies(const ArchInfo &Other) const {
if (this->Profile != Other.Profile)
return false; // ARMV8R and INVALID
if (this->Version.getMajor() == Other.Version.getMajor()) {
return this->Version > Other.Version;
}
if (this->Version.getMajor() == 9 && Other.Version.getMajor() == 8) {
return this->Version.getMinor().value() + 5 >=
Other.Version.getMinor().value();
}
return false;
}
// Return ArchFeature without the leading "+".
StringRef getSubArch() const { return ArchFeature.substr(1); }
// Search for ArchInfo by SubArch name
static const ArchInfo &findBySubArch(StringRef SubArch);
};
const CpuNames AArch64CPUNames[] = {
#define AARCH64_CPU_NAME(NAME, ID, DEFAULT_EXT) \
{NAME, AArch64::ArchKind::ID, DEFAULT_EXT},
// Create ArchInfo structs named <ID>
#define AARCH64_ARCH(MAJOR, MINOR, PROFILE, NAME, ID, ARCH_FEATURE, \
ARCH_BASE_EXT) \
inline constexpr ArchInfo ID = {VersionTuple{MAJOR, MINOR}, PROFILE, NAME, \
ARCH_FEATURE, ARCH_BASE_EXT};
#include "AArch64TargetParser.def"
#undef AARCH64_ARCH
// The set of all architectures
inline constexpr std::array<const ArchInfo *, 17> ArchInfos = {
#define AARCH64_ARCH(MAJOR, MINOR, PROFILE, NAME, ID, ARCH_FEATURE, \
ARCH_BASE_EXT) \
&ID,
#include "AArch64TargetParser.def"
};
const struct {
// Details of a specific CPU.
struct CpuInfo {
StringRef Name; // Name, as written for -mcpu.
const ArchInfo &Arch;
uint64_t DefaultExtensions;
};
inline constexpr CpuInfo CpuInfos[] = {
#define AARCH64_CPU_NAME(NAME, ARCH_ID, DEFAULT_EXT) \
{NAME, ARCH_ID, DEFAULT_EXT},
#include "AArch64TargetParser.def"
};
// An alias for a CPU.
struct CpuAlias {
StringRef Alias;
StringRef Name;
} AArch64CPUAliases[] = {
};
inline constexpr CpuAlias CpuAliases[] = {
#define AARCH64_CPU_ALIAS(ALIAS, NAME) {ALIAS, NAME},
#include "AArch64TargetParser.def"
};
const ArchKind ArchKinds[] = {
#define AARCH64_ARCH(NAME, ID, ARCH_FEATURE, ARCH_BASE_EXT) ArchKind::ID,
#include "AArch64TargetParser.def"
};
inline ArchKind &operator--(ArchKind &Kind) {
if ((Kind == ArchKind::INVALID) || (Kind == ArchKind::ARMV8A) ||
(Kind == ArchKind::ARMV9A) || (Kind == ArchKind::ARMV8R))
Kind = ArchKind::INVALID;
else {
unsigned KindAsInteger = static_cast<unsigned>(Kind);
Kind = static_cast<ArchKind>(--KindAsInteger);
}
return Kind;
}
bool getExtensionFeatures(uint64_t Extensions,
std::vector<StringRef> &Features);
StringRef getArchFeature(ArchKind AK);
StringRef getArchName(ArchKind AK);
StringRef getSubArch(ArchKind AK);
StringRef getArchExtName(unsigned ArchExtKind);
StringRef getArchExtFeature(StringRef ArchExt);
ArchKind convertV9toV8(ArchKind AK);
StringRef resolveCPUAlias(StringRef CPU);
// Information by Name
uint64_t getDefaultExtensions(StringRef CPU, ArchKind AK);
ArchKind getCPUArchKind(StringRef CPU);
ArchKind getSubArchArchKind(StringRef SubArch);
uint64_t getDefaultExtensions(StringRef CPU, const ArchInfo &AI);
const ArchInfo &getArchForCpu(StringRef CPU);
// Parser
ArchKind parseArch(StringRef Arch);
const ArchInfo &parseArch(StringRef Arch);
ArchExtKind parseArchExt(StringRef ArchExt);
ArchKind parseCPUArch(StringRef CPU);
// Given the name of a CPU or alias, return the corresponding CpuInfo.
const CpuInfo &parseCpu(StringRef Name);
// Used by target parser tests
void fillValidCPUArchList(SmallVectorImpl<StringRef> &Values);

View File

@ -41,24 +41,25 @@ class VersionTuple {
unsigned HasBuild : 1;
public:
VersionTuple()
constexpr VersionTuple()
: Major(0), Minor(0), HasMinor(false), Subminor(0), HasSubminor(false),
Build(0), HasBuild(false) {}
explicit VersionTuple(unsigned Major)
explicit constexpr VersionTuple(unsigned Major)
: Major(Major), Minor(0), HasMinor(false), Subminor(0),
HasSubminor(false), Build(0), HasBuild(false) {}
explicit VersionTuple(unsigned Major, unsigned Minor)
explicit constexpr VersionTuple(unsigned Major, unsigned Minor)
: Major(Major), Minor(Minor), HasMinor(true), Subminor(0),
HasSubminor(false), Build(0), HasBuild(false) {}
explicit VersionTuple(unsigned Major, unsigned Minor, unsigned Subminor)
explicit constexpr VersionTuple(unsigned Major, unsigned Minor,
unsigned Subminor)
: Major(Major), Minor(Minor), HasMinor(true), Subminor(Subminor),
HasSubminor(true), Build(0), HasBuild(false) {}
explicit VersionTuple(unsigned Major, unsigned Minor, unsigned Subminor,
unsigned Build)
explicit constexpr VersionTuple(unsigned Major, unsigned Minor,
unsigned Subminor, unsigned Build)
: Major(Major), Minor(Minor), HasMinor(true), Subminor(Subminor),
HasSubminor(true), Build(Build), HasBuild(true) {}

View File

@ -25,34 +25,33 @@ static unsigned checkArchVersion(llvm::StringRef Arch) {
return 0;
}
uint64_t AArch64::getDefaultExtensions(StringRef CPU, AArch64::ArchKind AK) {
uint64_t AArch64::getDefaultExtensions(StringRef CPU,
const AArch64::ArchInfo &AI) {
if (CPU == "generic")
return AArch64ARCHNames[static_cast<unsigned>(AK)].ArchBaseExtensions;
return AI.DefaultExts;
return StringSwitch<uint64_t>(CPU)
#define AARCH64_CPU_NAME(NAME, ID, DEFAULT_EXT) \
.Case(NAME, AArch64ARCHNames[static_cast<unsigned>(ArchKind::ID)] \
.ArchBaseExtensions | \
DEFAULT_EXT)
#define AARCH64_CPU_NAME(NAME, ARCH_ID, DEFAULT_EXT) \
.Case(NAME, ARCH_ID.DefaultExts | DEFAULT_EXT)
#include "../../include/llvm/Support/AArch64TargetParser.def"
.Default(AArch64::AEK_INVALID);
}
AArch64::ArchKind AArch64::getCPUArchKind(StringRef CPU) {
const AArch64::ArchInfo &AArch64::getArchForCpu(StringRef CPU) {
if (CPU == "generic")
return ArchKind::ARMV8A;
return ARMV8A;
return StringSwitch<AArch64::ArchKind>(CPU)
#define AARCH64_CPU_NAME(NAME, ID, DEFAULT_EXT) .Case(NAME, ArchKind::ID)
return *StringSwitch<const AArch64::ArchInfo *>(CPU)
#define AARCH64_CPU_NAME(NAME, ARCH_ID, DEFAULT_EXT) .Case(NAME, &ARCH_ID)
#include "../../include/llvm/Support/AArch64TargetParser.def"
.Default(ArchKind::INVALID);
.Default(&INVALID);
}
AArch64::ArchKind AArch64::getSubArchArchKind(StringRef SubArch) {
for (const auto &A : AArch64ARCHNames)
if (A.getSubArch() == SubArch)
return A.ID;
return ArchKind::INVALID;
const AArch64::ArchInfo &AArch64::ArchInfo::findBySubArch(StringRef SubArch) {
for (const auto *A : AArch64::ArchInfos)
if (A->getSubArch() == SubArch)
return *A;
return AArch64::INVALID;
}
bool AArch64::getExtensionFeatures(uint64_t Extensions,
@@ -80,53 +79,27 @@ StringRef AArch64::resolveCPUAlias(StringRef CPU) {
.Default(CPU);
}
StringRef AArch64::getArchFeature(AArch64::ArchKind AK) {
return AArch64ARCHNames[static_cast<unsigned>(AK)].ArchFeature;
}
StringRef AArch64::getArchName(AArch64::ArchKind AK) {
return AArch64ARCHNames[static_cast<unsigned>(AK)].Name;
}
StringRef AArch64::getSubArch(AArch64::ArchKind AK) {
return AArch64ARCHNames[static_cast<unsigned>(AK)].getSubArch();
}
StringRef AArch64::getArchExtFeature(StringRef ArchExt) {
if (ArchExt.startswith("no")) {
StringRef ArchExtBase(ArchExt.substr(2));
for (const auto &AE : AArch64ARCHExtNames) {
for (const auto &AE : Extensions) {
if (!AE.NegFeature.empty() && ArchExtBase == AE.Name)
return AE.NegFeature;
}
}
for (const auto &AE : AArch64ARCHExtNames)
for (const auto &AE : Extensions)
if (!AE.Feature.empty() && ArchExt == AE.Name)
return AE.Feature;
return StringRef();
}
AArch64::ArchKind AArch64::convertV9toV8(AArch64::ArchKind AK) {
if (AK == AArch64::ArchKind::INVALID)
return AK;
if (AK < AArch64::ArchKind::ARMV9A)
return AK;
if (AK >= AArch64::ArchKind::ARMV8R)
return AArch64::ArchKind::INVALID;
unsigned AK_v8 = static_cast<unsigned>(AArch64::ArchKind::ARMV8_5A);
AK_v8 += static_cast<unsigned>(AK) -
static_cast<unsigned>(AArch64::ArchKind::ARMV9A);
return static_cast<AArch64::ArchKind>(AK_v8);
}
void AArch64::fillValidCPUArchList(SmallVectorImpl<StringRef> &Values) {
for (const auto &Arch : AArch64CPUNames) {
if (Arch.ArchID != ArchKind::INVALID)
Values.push_back(Arch.Name);
}
for (const auto &C : CpuInfos)
if (C.Arch != INVALID)
Values.push_back(C.Name);
for (const auto &Alias: AArch64CPUAliases)
for (const auto &Alias : CpuAliases)
Values.push_back(Alias.Alias);
}
@@ -136,39 +109,37 @@ bool AArch64::isX18ReservedByDefault(const Triple &TT) {
}
// Allows partial match, ex. "v8a" matches "armv8a".
AArch64::ArchKind AArch64::parseArch(StringRef Arch) {
const AArch64::ArchInfo &AArch64::parseArch(StringRef Arch) {
Arch = llvm::ARM::getCanonicalArchName(Arch);
if (checkArchVersion(Arch) < 8)
return ArchKind::INVALID;
return AArch64::INVALID;
StringRef Syn = llvm::ARM::getArchSynonym(Arch);
for (const auto &A : AArch64ARCHNames) {
if (A.Name.endswith(Syn))
return A.ID;
for (const auto *A : ArchInfos) {
if (A->Name.endswith(Syn))
return *A;
}
return ArchKind::INVALID;
return AArch64::INVALID;
}
AArch64::ArchExtKind AArch64::parseArchExt(StringRef ArchExt) {
for (const auto &A : AArch64ARCHExtNames) {
for (const auto &A : Extensions) {
if (ArchExt == A.Name)
return static_cast<ArchExtKind>(A.ID);
}
return AArch64::AEK_INVALID;
}
AArch64::ArchKind AArch64::parseCPUArch(StringRef CPU) {
const AArch64::CpuInfo &AArch64::parseCpu(StringRef Name) {
// Resolve aliases first.
for (const auto &Alias : AArch64CPUAliases) {
if (CPU == Alias.Alias) {
CPU = Alias.Name;
break;
}
}
// Then find the CPU name.
for (const auto &C : AArch64CPUNames)
if (CPU == C.Name)
return C.ArchID;
Name = resolveCPUAlias(Name);
return ArchKind::INVALID;
// Then find the CPU name.
for (const auto &C : CpuInfos)
if (Name == C.Name)
return C;
// "generic" returns invalid.
assert(Name != "invalid" && "Unexpected recursion.");
return parseCpu("invalid");
}

View File

@@ -6806,67 +6806,48 @@ bool AArch64AsmParser::ParseDirective(AsmToken DirectiveID) {
return false;
}
static void ExpandCryptoAEK(AArch64::ArchKind ArchKind,
static void ExpandCryptoAEK(const AArch64::ArchInfo &ArchInfo,
SmallVector<StringRef, 4> &RequestedExtensions) {
const bool NoCrypto = llvm::is_contained(RequestedExtensions, "nocrypto");
const bool Crypto = llvm::is_contained(RequestedExtensions, "crypto");
if (!NoCrypto && Crypto) {
switch (ArchKind) {
default:
// Map 'generic' (and others) to sha2 and aes, because
// that was the traditional meaning of crypto.
case AArch64::ArchKind::ARMV8_1A:
case AArch64::ArchKind::ARMV8_2A:
case AArch64::ArchKind::ARMV8_3A:
// Map 'generic' (and others) to sha2 and aes, because
// that was the traditional meaning of crypto.
if (ArchInfo == AArch64::ARMV8_1A || ArchInfo == AArch64::ARMV8_2A ||
ArchInfo == AArch64::ARMV8_3A) {
RequestedExtensions.push_back("sha2");
RequestedExtensions.push_back("aes");
break;
case AArch64::ArchKind::ARMV8_4A:
case AArch64::ArchKind::ARMV8_5A:
case AArch64::ArchKind::ARMV8_6A:
case AArch64::ArchKind::ARMV8_7A:
case AArch64::ArchKind::ARMV8_8A:
case AArch64::ArchKind::ARMV8_9A:
case AArch64::ArchKind::ARMV9A:
case AArch64::ArchKind::ARMV9_1A:
case AArch64::ArchKind::ARMV9_2A:
case AArch64::ArchKind::ARMV9_3A:
case AArch64::ArchKind::ARMV9_4A:
case AArch64::ArchKind::ARMV8R:
}
if (ArchInfo == AArch64::ARMV8_4A || ArchInfo == AArch64::ARMV8_5A ||
ArchInfo == AArch64::ARMV8_6A || ArchInfo == AArch64::ARMV8_7A ||
ArchInfo == AArch64::ARMV8_8A || ArchInfo == AArch64::ARMV8_9A ||
ArchInfo == AArch64::ARMV9A || ArchInfo == AArch64::ARMV9_1A ||
ArchInfo == AArch64::ARMV9_2A || ArchInfo == AArch64::ARMV9_3A ||
ArchInfo == AArch64::ARMV9_4A || ArchInfo == AArch64::ARMV8R) {
RequestedExtensions.push_back("sm4");
RequestedExtensions.push_back("sha3");
RequestedExtensions.push_back("sha2");
RequestedExtensions.push_back("aes");
break;
}
} else if (NoCrypto) {
switch (ArchKind) {
default:
// Map 'generic' (and others) to sha2 and aes, because
// that was the traditional meaning of crypto.
case AArch64::ArchKind::ARMV8_1A:
case AArch64::ArchKind::ARMV8_2A:
case AArch64::ArchKind::ARMV8_3A:
// Map 'generic' (and others) to sha2 and aes, because
// that was the traditional meaning of crypto.
if (ArchInfo == AArch64::ARMV8_1A || ArchInfo == AArch64::ARMV8_2A ||
ArchInfo == AArch64::ARMV8_3A) {
RequestedExtensions.push_back("nosha2");
RequestedExtensions.push_back("noaes");
break;
case AArch64::ArchKind::ARMV8_4A:
case AArch64::ArchKind::ARMV8_5A:
case AArch64::ArchKind::ARMV8_6A:
case AArch64::ArchKind::ARMV8_7A:
case AArch64::ArchKind::ARMV8_8A:
case AArch64::ArchKind::ARMV8_9A:
case AArch64::ArchKind::ARMV9A:
case AArch64::ArchKind::ARMV9_1A:
case AArch64::ArchKind::ARMV9_2A:
case AArch64::ArchKind::ARMV9_3A:
case AArch64::ArchKind::ARMV9_4A:
}
if (ArchInfo == AArch64::ARMV8_4A || ArchInfo == AArch64::ARMV8_5A ||
ArchInfo == AArch64::ARMV8_6A || ArchInfo == AArch64::ARMV8_7A ||
ArchInfo == AArch64::ARMV8_8A || ArchInfo == AArch64::ARMV8_9A ||
ArchInfo == AArch64::ARMV9A || ArchInfo == AArch64::ARMV9_1A ||
ArchInfo == AArch64::ARMV9_2A || ArchInfo == AArch64::ARMV9_3A ||
ArchInfo == AArch64::ARMV9_4A) {
RequestedExtensions.push_back("nosm4");
RequestedExtensions.push_back("nosha3");
RequestedExtensions.push_back("nosha2");
RequestedExtensions.push_back("noaes");
break;
}
}
}
@@ -6880,8 +6861,8 @@ bool AArch64AsmParser::parseDirectiveArch(SMLoc L) {
std::tie(Arch, ExtensionString) =
getParser().parseStringToEndOfStatement().trim().split('+');
AArch64::ArchKind ID = AArch64::parseArch(Arch);
if (ID == AArch64::ArchKind::INVALID)
const AArch64::ArchInfo &ArchInfo = AArch64::parseArch(Arch);
if (ArchInfo == AArch64::INVALID)
return Error(ArchLoc, "unknown arch name");
if (parseToken(AsmToken::EndOfStatement))
@@ -6889,9 +6870,9 @@ bool AArch64AsmParser::parseDirectiveArch(SMLoc L) {
// Get the architecture and extension features.
std::vector<StringRef> AArch64Features;
AArch64Features.push_back(AArch64::getArchFeature(ID));
AArch64::getExtensionFeatures(AArch64::getDefaultExtensions("generic", ID),
AArch64Features);
AArch64Features.push_back(ArchInfo.ArchFeature);
AArch64::getExtensionFeatures(
AArch64::getDefaultExtensions("generic", ArchInfo), AArch64Features);
MCSubtargetInfo &STI = copySTI();
std::vector<std::string> ArchFeatures(AArch64Features.begin(), AArch64Features.end());
@@ -6902,7 +6883,7 @@ bool AArch64AsmParser::parseDirectiveArch(SMLoc L) {
if (!ExtensionString.empty())
ExtensionString.split(RequestedExtensions, '+');
ExpandCryptoAEK(ID, RequestedExtensions);
ExpandCryptoAEK(ArchInfo, RequestedExtensions);
FeatureBitset Features = STI.getFeatureBits();
for (auto Name : RequestedExtensions) {
@@ -6998,7 +6979,7 @@ bool AArch64AsmParser::parseDirectiveCPU(SMLoc L) {
STI.setDefaultFeatures(CPU, /*TuneCPU*/ CPU, "");
CurLoc = incrementLoc(CurLoc, CPU.size());
ExpandCryptoAEK(llvm::AArch64::getCPUArchKind(CPU), RequestedExtensions);
ExpandCryptoAEK(llvm::AArch64::getArchForCpu(CPU), RequestedExtensions);
for (auto Name : RequestedExtensions) {
// Advance source location past '+'.

View File

@@ -952,11 +952,11 @@ class AArch64CPUTestFixture
TEST_P(AArch64CPUTestFixture, testAArch64CPU) {
ARMCPUTestParams params = GetParam();
AArch64::ArchKind AK = AArch64::parseCPUArch(params.CPUName);
EXPECT_EQ(params.ExpectedArch, AArch64::getArchName(AK));
const AArch64::ArchInfo &AI = AArch64::parseCpu(params.CPUName).Arch;
EXPECT_EQ(params.ExpectedArch, AI.Name);
uint64_t default_extensions =
AArch64::getDefaultExtensions(params.CPUName, AK);
AArch64::getDefaultExtensions(params.CPUName, AI);
EXPECT_PRED_FORMAT2(AssertSameExtensionFlags<ARM::ISAKind::AARCH64>,
params.ExpectedFlags, default_extensions);
}
@@ -1402,14 +1402,14 @@ TEST(TargetParserTest, testAArch64CPUArchList) {
// valid, and match the expected 'magic' count.
EXPECT_EQ(List.size(), NumAArch64CPUArchs);
for(StringRef CPU : List) {
EXPECT_NE(AArch64::parseCPUArch(CPU), AArch64::ArchKind::INVALID);
EXPECT_NE(AArch64::parseCpu(CPU).Arch, AArch64::INVALID);
}
}
bool testAArch64Arch(StringRef Arch, StringRef DefaultCPU, StringRef SubArch,
unsigned ArchAttr) {
AArch64::ArchKind AK = AArch64::parseArch(Arch);
return AK != AArch64::ArchKind::INVALID;
const AArch64::ArchInfo &AI = AArch64::parseArch(Arch);
return AI != AArch64::INVALID;
}
TEST(TargetParserTest, testAArch64Arch) {
@@ -1445,148 +1445,81 @@ TEST(TargetParserTest, testAArch64Arch) {
ARMBuildAttrs::CPUArch::v8_A));
}
bool testAArch64Extension(StringRef CPUName, AArch64::ArchKind AK,
bool testAArch64Extension(StringRef CPUName, const AArch64::ArchInfo &AI,
StringRef ArchExt) {
return AArch64::getDefaultExtensions(CPUName, AK) &
return AArch64::getDefaultExtensions(CPUName, AI) &
AArch64::parseArchExt(ArchExt);
}
TEST(TargetParserTest, testAArch64Extension) {
EXPECT_FALSE(testAArch64Extension("cortex-a34",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a35",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a53",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a55",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a55",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension("cortex-a55",
AArch64::ArchKind::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("cortex-a55",
AArch64::ArchKind::INVALID, "dotprod"));
EXPECT_FALSE(testAArch64Extension("cortex-a57",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a72",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a73",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a75",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a75",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension("cortex-a75",
AArch64::ArchKind::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("cortex-a75",
AArch64::ArchKind::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("cortex-r82",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-r82",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("cortex-r82",
AArch64::ArchKind::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("cortex-r82",
AArch64::ArchKind::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("cortex-r82",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("cyclone",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("exynos-m3",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("exynos-m4",
AArch64::ArchKind::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("exynos-m4",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("exynos-m4",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("exynos-m4",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("exynos-m4",
AArch64::ArchKind::INVALID, "rdm"));
EXPECT_TRUE(testAArch64Extension("exynos-m5",
AArch64::ArchKind::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("exynos-m5",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("exynos-m5",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("exynos-m5",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("exynos-m5",
AArch64::ArchKind::INVALID, "rdm"));
EXPECT_TRUE(testAArch64Extension("falkor",
AArch64::ArchKind::INVALID, "rdm"));
EXPECT_FALSE(testAArch64Extension("kryo",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("saphira",
AArch64::ArchKind::INVALID, "crc"));
EXPECT_TRUE(testAArch64Extension("saphira",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("saphira",
AArch64::ArchKind::INVALID, "rdm"));
EXPECT_TRUE(testAArch64Extension("saphira",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("saphira",
AArch64::ArchKind::INVALID, "rcpc"));
EXPECT_TRUE(testAArch64Extension("saphira",
AArch64::ArchKind::INVALID, "profile"));
EXPECT_FALSE(testAArch64Extension("saphira",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension("thunderx2t99",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("thunderx",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("thunderxt81",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("thunderxt83",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("thunderxt88",
AArch64::ArchKind::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "crypto"));
EXPECT_FALSE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "sha3"));
EXPECT_FALSE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "sm4"));
EXPECT_TRUE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "profile"));
EXPECT_TRUE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("tsv110",
AArch64::ArchKind::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("a64fx",
AArch64::ArchKind::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("a64fx",
AArch64::ArchKind::INVALID, "sve"));
EXPECT_FALSE(testAArch64Extension("a64fx",
AArch64::ArchKind::INVALID, "sve2"));
EXPECT_TRUE(
testAArch64Extension("carmel", AArch64::ArchKind::INVALID, "crypto"));
EXPECT_TRUE(
testAArch64Extension("carmel", AArch64::ArchKind::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension("cortex-a34", AArch64::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a35", AArch64::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a53", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a55", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a55", AArch64::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension("cortex-a55", AArch64::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("cortex-a55", AArch64::INVALID, "dotprod"));
EXPECT_FALSE(testAArch64Extension("cortex-a57", AArch64::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a72", AArch64::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("cortex-a73", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a75", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-a75", AArch64::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension("cortex-a75", AArch64::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("cortex-a75", AArch64::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("cortex-r82", AArch64::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("cyclone", AArch64::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("exynos-m3", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("exynos-m4", AArch64::INVALID, "rdm"));
EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("exynos-m5", AArch64::INVALID, "rdm"));
EXPECT_TRUE(testAArch64Extension("falkor", AArch64::INVALID, "rdm"));
EXPECT_FALSE(testAArch64Extension("kryo", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "crc"));
EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "rdm"));
EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "rcpc"));
EXPECT_TRUE(testAArch64Extension("saphira", AArch64::INVALID, "profile"));
EXPECT_FALSE(testAArch64Extension("saphira", AArch64::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension("thunderx2t99", AArch64::INVALID, "ras"));
EXPECT_FALSE(testAArch64Extension("thunderx", AArch64::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("thunderxt81", AArch64::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("thunderxt83", AArch64::INVALID, "lse"));
EXPECT_FALSE(testAArch64Extension("thunderxt88", AArch64::INVALID, "lse"));
EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "crypto"));
EXPECT_FALSE(testAArch64Extension("tsv110", AArch64::INVALID, "sha3"));
EXPECT_FALSE(testAArch64Extension("tsv110", AArch64::INVALID, "sm4"));
EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "ras"));
EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "profile"));
EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "fp16fml"));
EXPECT_TRUE(testAArch64Extension("tsv110", AArch64::INVALID, "dotprod"));
EXPECT_TRUE(testAArch64Extension("a64fx", AArch64::INVALID, "fp16"));
EXPECT_TRUE(testAArch64Extension("a64fx", AArch64::INVALID, "sve"));
EXPECT_FALSE(testAArch64Extension("a64fx", AArch64::INVALID, "sve2"));
EXPECT_TRUE(testAArch64Extension("carmel", AArch64::INVALID, "crypto"));
EXPECT_TRUE(testAArch64Extension("carmel", AArch64::INVALID, "fp16"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8A, "ras"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_1A, "ras"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_2A, "profile"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_2A, "fp16"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_2A, "fp16fml"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_3A, "fp16"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_3A, "fp16fml"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_4A, "fp16"));
EXPECT_FALSE(testAArch64Extension(
"generic", AArch64::ArchKind::ARMV8_4A, "fp16fml"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8A, "ras"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_1A, "ras"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_2A, "profile"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_2A, "fp16"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_2A, "fp16fml"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_3A, "fp16"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_3A, "fp16fml"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_4A, "fp16"));
EXPECT_FALSE(testAArch64Extension("generic", AArch64::ARMV8_4A, "fp16fml"));
}
TEST(TargetParserTest, AArch64ExtensionFeatures) {
@@ -1689,44 +1622,81 @@ TEST(TargetParserTest, AArch64ExtensionFeatures) {
}
TEST(TargetParserTest, AArch64ArchFeatures) {
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::INVALID), "+");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8A), "+v8a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_1A), "+v8.1a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_2A), "+v8.2a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_3A), "+v8.3a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_4A), "+v8.4a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_5A), "+v8.5a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_6A), "+v8.6a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_7A), "+v8.7a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_8A), "+v8.8a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8_9A), "+v8.9a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV9A), "+v9a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV9_1A), "+v9.1a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV9_2A), "+v9.2a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV9_3A), "+v9.3a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV9_4A), "+v9.4a");
EXPECT_EQ(AArch64::getArchFeature(AArch64::ArchKind::ARMV8R), "+v8r");
EXPECT_EQ(AArch64::INVALID.ArchFeature, "+");
EXPECT_EQ(AArch64::ARMV8A.ArchFeature, "+v8a");
EXPECT_EQ(AArch64::ARMV8_1A.ArchFeature, "+v8.1a");
EXPECT_EQ(AArch64::ARMV8_2A.ArchFeature, "+v8.2a");
EXPECT_EQ(AArch64::ARMV8_3A.ArchFeature, "+v8.3a");
EXPECT_EQ(AArch64::ARMV8_4A.ArchFeature, "+v8.4a");
EXPECT_EQ(AArch64::ARMV8_5A.ArchFeature, "+v8.5a");
EXPECT_EQ(AArch64::ARMV8_6A.ArchFeature, "+v8.6a");
EXPECT_EQ(AArch64::ARMV8_7A.ArchFeature, "+v8.7a");
EXPECT_EQ(AArch64::ARMV8_8A.ArchFeature, "+v8.8a");
EXPECT_EQ(AArch64::ARMV8_9A.ArchFeature, "+v8.9a");
EXPECT_EQ(AArch64::ARMV9A.ArchFeature, "+v9a");
EXPECT_EQ(AArch64::ARMV9_1A.ArchFeature, "+v9.1a");
EXPECT_EQ(AArch64::ARMV9_2A.ArchFeature, "+v9.2a");
EXPECT_EQ(AArch64::ARMV9_3A.ArchFeature, "+v9.3a");
EXPECT_EQ(AArch64::ARMV9_4A.ArchFeature, "+v9.4a");
EXPECT_EQ(AArch64::ARMV8R.ArchFeature, "+v8r");
}
TEST(TargetParserTest, AArch64ArchV9toV8Conversion) {
for (auto AK : AArch64::ArchKinds) {
if (AK == AArch64::ArchKind::INVALID)
EXPECT_EQ(AK, AArch64::convertV9toV8(AK));
else if (AK < AArch64::ArchKind::ARMV9A)
EXPECT_EQ(AK, AArch64::convertV9toV8(AK));
else if (AK >= AArch64::ArchKind::ARMV8R)
EXPECT_EQ(AArch64::ArchKind::INVALID, AArch64::convertV9toV8(AK));
else
EXPECT_TRUE(AArch64::convertV9toV8(AK) < AArch64::ArchKind::ARMV9A);
TEST(TargetParserTest, AArch64ArchPartialOrder) {
EXPECT_FALSE(AArch64::INVALID.implies(AArch64::INVALID));
for (const auto *A : AArch64::ArchInfos) {
EXPECT_EQ(*A, *A);
if (!(*A == *A)) {
EXPECT_NE(*A, *A);
}
// Comparison with invalid is always false
EXPECT_FALSE(A->implies(AArch64::INVALID));
EXPECT_FALSE(AArch64::INVALID.implies(*A));
// v8r has no relation to other valid architectures
if (*A != AArch64::ARMV8R) {
EXPECT_FALSE(A->implies(AArch64::ARMV8R));
EXPECT_FALSE(AArch64::ARMV8R.implies(*A));
}
}
EXPECT_EQ(AArch64::ArchKind::ARMV8_5A,
AArch64::convertV9toV8(AArch64::ArchKind::ARMV9A));
EXPECT_EQ(AArch64::ArchKind::ARMV8_6A,
AArch64::convertV9toV8(AArch64::ArchKind::ARMV9_1A));
EXPECT_EQ(AArch64::ArchKind::ARMV8_7A,
AArch64::convertV9toV8(AArch64::ArchKind::ARMV9_2A));
EXPECT_EQ(AArch64::ArchKind::ARMV8_8A,
AArch64::convertV9toV8(AArch64::ArchKind::ARMV9_3A));
for (const auto *A : {
&AArch64::ARMV8_1A,
&AArch64::ARMV8_2A,
&AArch64::ARMV8_3A,
&AArch64::ARMV8_4A,
&AArch64::ARMV8_5A,
&AArch64::ARMV8_6A,
&AArch64::ARMV8_7A,
&AArch64::ARMV8_8A,
&AArch64::ARMV8_9A,
})
EXPECT_TRUE(A->implies(AArch64::ARMV8A));
for (const auto *A : {&AArch64::ARMV9_1A, &AArch64::ARMV9_2A,
&AArch64::ARMV9_3A, &AArch64::ARMV9_4A})
EXPECT_TRUE(A->implies(AArch64::ARMV9A));
EXPECT_TRUE(AArch64::ARMV8_1A.implies(AArch64::ARMV8A));
EXPECT_TRUE(AArch64::ARMV8_2A.implies(AArch64::ARMV8_1A));
EXPECT_TRUE(AArch64::ARMV8_3A.implies(AArch64::ARMV8_2A));
EXPECT_TRUE(AArch64::ARMV8_4A.implies(AArch64::ARMV8_3A));
EXPECT_TRUE(AArch64::ARMV8_5A.implies(AArch64::ARMV8_4A));
EXPECT_TRUE(AArch64::ARMV8_6A.implies(AArch64::ARMV8_5A));
EXPECT_TRUE(AArch64::ARMV8_7A.implies(AArch64::ARMV8_6A));
EXPECT_TRUE(AArch64::ARMV8_8A.implies(AArch64::ARMV8_7A));
EXPECT_TRUE(AArch64::ARMV8_9A.implies(AArch64::ARMV8_8A));
EXPECT_TRUE(AArch64::ARMV9_1A.implies(AArch64::ARMV9A));
EXPECT_TRUE(AArch64::ARMV9_2A.implies(AArch64::ARMV9_1A));
EXPECT_TRUE(AArch64::ARMV9_3A.implies(AArch64::ARMV9_2A));
EXPECT_TRUE(AArch64::ARMV9_4A.implies(AArch64::ARMV9_3A));
EXPECT_TRUE(AArch64::ARMV9A.implies(AArch64::ARMV8_5A));
EXPECT_TRUE(AArch64::ARMV9_1A.implies(AArch64::ARMV8_6A));
EXPECT_TRUE(AArch64::ARMV9_2A.implies(AArch64::ARMV8_7A));
EXPECT_TRUE(AArch64::ARMV9_3A.implies(AArch64::ARMV8_8A));
EXPECT_TRUE(AArch64::ARMV9_4A.implies(AArch64::ARMV8_9A));
}
TEST(TargetParserTest, AArch64ArchExtFeature) {