[llvm-profdata] Add support for weighted merge of profile data

This change adds support for an optional weight when merging profile data with the llvm-profdata tool.
Weights are specified by appending an optional ':<weight>' suffix to the input file names.

Adding support for arbitrary weighting of input profile data allows for relative importance to be placed on the
input data from multiple training runs.

Both sampled and instrumented profiles are supported.

Reviewers: dnovillo, bogner, davidxl

Subscribers: llvm-commits

Differential Revision: http://reviews.llvm.org/D14547

llvm-svn: 254669
This commit is contained in:
Nathan Slingerland 2015-12-04 00:00:20 +00:00
parent 09330577ff
commit 2a3dbe8be2
13 changed files with 266 additions and 47 deletions

View File

@ -28,7 +28,7 @@ MERGE
SYNOPSIS
^^^^^^^^
:program:`llvm-profdata merge` [*options*] [*filenames...*]
:program:`llvm-profdata merge` [*options*] [*filename[:weight]...*]
DESCRIPTION
^^^^^^^^^^^
@ -37,6 +37,10 @@ DESCRIPTION
generated by PGO instrumentation and merges them together into a single
indexed profile data file.
The profile counts in each input file can be scaled (multiplied) by specifying
``<filename>:<weight>``, where ``<weight>`` is a decimal integer >= 1.
A default weight of 1 is assumed if only ``<filename>`` is given.
OPTIONS
^^^^^^^

View File

@ -218,7 +218,8 @@ struct InstrProfValueSiteRecord {
}
/// Merge data from another InstrProfValueSiteRecord
void mergeValueData(InstrProfValueSiteRecord &Input) {
/// Optionally scale merged counts by \p Weight.
void mergeValueData(InstrProfValueSiteRecord &Input, uint64_t Weight = 1) {
this->sortByTargetValues();
Input.sortByTargetValues();
auto I = ValueData.begin();
@ -228,7 +229,11 @@ struct InstrProfValueSiteRecord {
while (I != IE && I->Value < J->Value)
++I;
if (I != IE && I->Value == J->Value) {
I->Count = SaturatingAdd(I->Count, J->Count);
// TODO: Check for counter overflow and return error if it occurs.
uint64_t JCount = J->Count;
if (Weight > 1)
JCount = SaturatingMultiply(JCount, Weight);
I->Count = SaturatingAdd(I->Count, JCount);
++I;
continue;
}
@ -274,7 +279,8 @@ struct InstrProfRecord {
ValueMapType *HashKeys);
/// Merge the counts in \p Other into this one.
inline instrprof_error merge(InstrProfRecord &Other);
/// Optionally scale merged counts by \p Weight.
inline instrprof_error merge(InstrProfRecord &Other, uint64_t Weight = 1);
/// Used by InstrProfWriter: update the value strings to commoned strings in
/// the writer instance.
@ -326,7 +332,9 @@ private:
}
// Merge Value Profile data from Src record to this record for ValueKind.
instrprof_error mergeValueProfData(uint32_t ValueKind, InstrProfRecord &Src) {
// Scale merged value counts by \p Weight.
instrprof_error mergeValueProfData(uint32_t ValueKind, InstrProfRecord &Src,
uint64_t Weight) {
uint32_t ThisNumValueSites = getNumValueSites(ValueKind);
uint32_t OtherNumValueSites = Src.getNumValueSites(ValueKind);
if (ThisNumValueSites != OtherNumValueSites)
@ -336,7 +344,7 @@ private:
std::vector<InstrProfValueSiteRecord> &OtherSiteRecords =
Src.getValueSitesForKind(ValueKind);
for (uint32_t I = 0; I < ThisNumValueSites; I++)
ThisSiteRecords[I].mergeValueData(OtherSiteRecords[I]);
ThisSiteRecords[I].mergeValueData(OtherSiteRecords[I], Weight);
return instrprof_error::success;
}
};
@ -422,7 +430,8 @@ void InstrProfRecord::updateStrings(InstrProfStringTable *StrTab) {
VData.Value = (uint64_t)StrTab->insertString((const char *)VData.Value);
}
instrprof_error InstrProfRecord::merge(InstrProfRecord &Other) {
instrprof_error InstrProfRecord::merge(InstrProfRecord &Other,
uint64_t Weight) {
// If the number of counters doesn't match we either have bad data
// or a hash collision.
if (Counts.size() != Other.Counts.size())
@ -432,13 +441,19 @@ instrprof_error InstrProfRecord::merge(InstrProfRecord &Other) {
for (size_t I = 0, E = Other.Counts.size(); I < E; ++I) {
bool ResultOverflowed;
Counts[I] = SaturatingAdd(Counts[I], Other.Counts[I], ResultOverflowed);
uint64_t OtherCount = Other.Counts[I];
if (Weight > 1) {
OtherCount = SaturatingMultiply(OtherCount, Weight, ResultOverflowed);
if (ResultOverflowed)
Result = instrprof_error::counter_overflow;
}
Counts[I] = SaturatingAdd(Counts[I], OtherCount, ResultOverflowed);
if (ResultOverflowed)
Result = instrprof_error::counter_overflow;
}
for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind) {
instrprof_error MergeValueResult = mergeValueProfData(Kind, Other);
instrprof_error MergeValueResult = mergeValueProfData(Kind, Other, Weight);
if (MergeValueResult != instrprof_error::success)
Result = MergeValueResult;
}

View File

@ -39,8 +39,8 @@ public:
void updateStringTableReferences(InstrProfRecord &I);
/// Add function counts for the given function. If there are already counts
/// for this function and the hash and number of counts match, each counter is
/// summed.
std::error_code addRecord(InstrProfRecord &&I);
/// summed. Optionally scale counts by \p Weight.
std::error_code addRecord(InstrProfRecord &&I, uint64_t Weight = 1);
/// Write the profile to \c OS
void write(raw_fd_ostream &OS);
/// Write the profile in text format to \c OS

View File

@ -173,19 +173,25 @@ public:
SampleRecord() : NumSamples(0), CallTargets() {}
/// Increment the number of samples for this record by \p S.
/// Optionally scale sample count \p S by \p Weight.
///
/// Sample counts accumulate using saturating arithmetic, to avoid wrapping
/// around unsigned integers.
void addSamples(uint64_t S) {
// Add \p S samples to this record; when Weight > 1 the added count is first
// scaled by \p Weight. Both operations use saturating arithmetic so counts
// clamp instead of wrapping around.
void addSamples(uint64_t S, uint64_t Weight = 1) {
// Skip the multiply entirely in the common unweighted (Weight == 1) case.
if (Weight > 1)
S = SaturatingMultiply(S, Weight);
NumSamples = SaturatingAdd(NumSamples, S);
}
/// Add called function \p F with samples \p S.
/// Optionally scale sample count \p S by \p Weight.
///
/// Sample counts accumulate using saturating arithmetic, to avoid wrapping
/// around unsigned integers.
void addCalledTarget(StringRef F, uint64_t S) {
// Record \p S samples for called target \p F, optionally scaled by
// \p Weight. Accumulation saturates rather than wrapping.
void addCalledTarget(StringRef F, uint64_t S, uint64_t Weight = 1) {
// operator[] value-initializes the entry to 0 the first time F is seen.
uint64_t &TargetSamples = CallTargets[F];
if (Weight > 1)
S = SaturatingMultiply(S, Weight);
TargetSamples = SaturatingAdd(TargetSamples, S);
}
@ -196,10 +202,11 @@ public:
const CallTargetMap &getCallTargets() const { return CallTargets; }
/// Merge the samples in \p Other into this record.
void merge(const SampleRecord &Other) {
addSamples(Other.getSamples());
/// Optionally scale sample counts by \p Weight.
void merge(const SampleRecord &Other, uint64_t Weight = 1) {
addSamples(Other.getSamples(), Weight);
for (const auto &I : Other.getCallTargets())
addCalledTarget(I.first(), I.second);
addCalledTarget(I.first(), I.second, Weight);
}
void print(raw_ostream &OS, unsigned Indent) const;
@ -226,16 +233,26 @@ public:
FunctionSamples() : TotalSamples(0), TotalHeadSamples(0) {}
void print(raw_ostream &OS = dbgs(), unsigned Indent = 0) const;
void dump() const;
void addTotalSamples(uint64_t Num) { TotalSamples += Num; }
void addHeadSamples(uint64_t Num) { TotalHeadSamples += Num; }
void addBodySamples(uint32_t LineOffset, uint32_t Discriminator,
uint64_t Num) {
BodySamples[LineLocation(LineOffset, Discriminator)].addSamples(Num);
// Add \p Num (scaled by \p Weight when Weight > 1) to the function's total
// sample count.
// NOTE(review): unlike SampleRecord::addSamples, the accumulation here is a
// plain += and can wrap on overflow even though the scaling multiply
// saturates — confirm whether SaturatingAdd was intended.
void addTotalSamples(uint64_t Num, uint64_t Weight = 1) {
if (Weight > 1)
Num = SaturatingMultiply(Num, Weight);
TotalSamples += Num;
}
// Add \p Num (scaled by \p Weight when Weight > 1) to the function's head
// sample count.
// NOTE(review): the accumulation is a plain += and can wrap, while the
// scaling multiply saturates — confirm whether SaturatingAdd was intended.
void addHeadSamples(uint64_t Num, uint64_t Weight = 1) {
if (Weight > 1)
Num = SaturatingMultiply(Num, Weight);
TotalHeadSamples += Num;
}
// Accumulate \p Num samples (scaled by \p Weight) into the body-sample
// record keyed by (LineOffset, Discriminator). Saturation is handled by
// SampleRecord::addSamples; the map entry is created on first use.
void addBodySamples(uint32_t LineOffset, uint32_t Discriminator, uint64_t Num,
uint64_t Weight = 1) {
BodySamples[LineLocation(LineOffset, Discriminator)].addSamples(Num,
Weight);
}
void addCalledTargetSamples(uint32_t LineOffset, uint32_t Discriminator,
std::string FName, uint64_t Num) {
BodySamples[LineLocation(LineOffset, Discriminator)].addCalledTarget(FName,
Num);
std::string FName, uint64_t Num,
uint64_t Weight = 1) {
BodySamples[LineLocation(LineOffset, Discriminator)].addCalledTarget(
FName, Num, Weight);
}
/// Return the number of samples collected at the given location.
@ -284,18 +301,19 @@ public:
}
/// Merge the samples in \p Other into this one.
void merge(const FunctionSamples &Other) {
addTotalSamples(Other.getTotalSamples());
addHeadSamples(Other.getHeadSamples());
/// Optionally scale samples by \p Weight.
void merge(const FunctionSamples &Other, uint64_t Weight = 1) {
addTotalSamples(Other.getTotalSamples(), Weight);
addHeadSamples(Other.getHeadSamples(), Weight);
for (const auto &I : Other.getBodySamples()) {
const LineLocation &Loc = I.first;
const SampleRecord &Rec = I.second;
BodySamples[Loc].merge(Rec);
BodySamples[Loc].merge(Rec, Weight);
}
for (const auto &I : Other.getCallsiteSamples()) {
const CallsiteLocation &Loc = I.first;
const FunctionSamples &Rec = I.second;
functionSamplesAt(Loc).merge(Rec);
functionSamplesAt(Loc).merge(Rec, Weight);
}
}

View File

@ -98,7 +98,8 @@ void InstrProfWriter::updateStringTableReferences(InstrProfRecord &I) {
I.updateStrings(&StringTable);
}
std::error_code InstrProfWriter::addRecord(InstrProfRecord &&I) {
std::error_code InstrProfWriter::addRecord(InstrProfRecord &&I,
uint64_t Weight) {
updateStringTableReferences(I);
auto &ProfileDataMap = FunctionData[I.Name];
@ -113,9 +114,18 @@ std::error_code InstrProfWriter::addRecord(InstrProfRecord &&I) {
// We've never seen a function with this name and hash, add it.
Dest = std::move(I);
Result = instrprof_error::success;
if (Weight > 1) {
for (auto &Count : Dest.Counts) {
bool Overflowed;
Count = SaturatingMultiply(Count, Weight, Overflowed);
if (Overflowed && Result == instrprof_error::success) {
Result = instrprof_error::counter_overflow;
}
}
}
} else {
// We're updating a function we've seen before.
Result = Dest.merge(I);
Result = Dest.merge(I, Weight);
}
// We keep track of the max function count as we go for simplicity.

View File

@ -0,0 +1,8 @@
bar:1772037:35370
17: 35370
18: 35370
19: 7005
20: 29407
21: 12170
23: 18150 bar:19829
25: 36666

View File

@ -0,0 +1,8 @@
foo:1763288:35327
7: 35327
8: 35327
9: 6930
10: 29341
11: 11906
13: 18185 foo:19531
15: 36458

View File

@ -0,0 +1,55 @@
Tests for weighted merge of instrumented profiles.
1- Merge the foo and bar profiles with unity weight and verify the combined output
RUN: llvm-profdata merge --instr %p/Inputs/weight-instr-bar.profdata:1 %p/Inputs/weight-instr-foo.profdata:1 -o %t
RUN: llvm-profdata show --instr -all-functions %t | FileCheck %s --check-prefix=WEIGHT1
WEIGHT1: Counters:
WEIGHT1: usage:
WEIGHT1: Hash: 0x0000000000000000
WEIGHT1: Counters: 1
WEIGHT1: Function count: 0
WEIGHT1: foo:
WEIGHT1: Hash: 0x000000000000028a
WEIGHT1: Counters: 3
WEIGHT1: Function count: 866988873
WEIGHT1: bar:
WEIGHT1: Hash: 0x000000000000028a
WEIGHT1: Counters: 3
WEIGHT1: Function count: 866988873
WEIGHT1: main:
WEIGHT1: Hash: 0x7d31c47ea98f8248
WEIGHT1: Counters: 60
WEIGHT1: Function count: 2
WEIGHT1: Functions shown: 4
WEIGHT1: Total functions: 4
WEIGHT1: Maximum function count: 866988873
WEIGHT1: Maximum internal block count: 267914296
2- Merge the foo and bar profiles with weight 3x and 5x respectively and verify the combined output
RUN: llvm-profdata merge --instr %p/Inputs/weight-instr-bar.profdata:3 %p/Inputs/weight-instr-foo.profdata:5 -o %t
RUN: llvm-profdata show --instr -all-functions %t | FileCheck %s --check-prefix=WEIGHT2
WEIGHT2: Counters:
WEIGHT2: usage:
WEIGHT2: Hash: 0x0000000000000000
WEIGHT2: Counters: 1
WEIGHT2: Function count: 0
WEIGHT2: foo:
WEIGHT2: Hash: 0x000000000000028a
WEIGHT2: Counters: 3
WEIGHT2: Function count: 4334944365
WEIGHT2: bar:
WEIGHT2: Hash: 0x000000000000028a
WEIGHT2: Counters: 3
WEIGHT2: Function count: 2600966619
WEIGHT2: main:
WEIGHT2: Hash: 0x7d31c47ea98f8248
WEIGHT2: Counters: 60
WEIGHT2: Function count: 8
WEIGHT2: Functions shown: 4
WEIGHT2: Total functions: 4
WEIGHT2: Maximum function count: 4334944365
WEIGHT2: Maximum internal block count: 1339571480
3- Bad merge: foo and bar profiles with invalid weights
RUN: not llvm-profdata merge --instr %p/Inputs/weight-instr-bar.profdata:3 %p/Inputs/weight-instr-foo.profdata:-5 -o %t.out 2>&1 | FileCheck %s --check-prefix=ERROR3
ERROR3: error: Input weight must be a positive integer.

View File

@ -0,0 +1,43 @@
Tests for weighted merge of sample profiles.
1- Merge the foo and bar profiles with unity weight and verify the combined output
RUN: llvm-profdata merge --sample --text %p/Inputs/weight-sample-bar.proftext:1 %p/Inputs/weight-sample-foo.proftext:1 -o - | FileCheck %s --check-prefix=WEIGHT1
WEIGHT1: foo:1763288:35327
WEIGHT1: 7: 35327
WEIGHT1: 8: 35327
WEIGHT1: 9: 6930
WEIGHT1: 10: 29341
WEIGHT1: 11: 11906
WEIGHT1: 13: 18185 foo:19531
WEIGHT1: 15: 36458
WEIGHT1: bar:1772037:35370
WEIGHT1: 17: 35370
WEIGHT1: 18: 35370
WEIGHT1: 19: 7005
WEIGHT1: 20: 29407
WEIGHT1: 21: 12170
WEIGHT1: 23: 18150 bar:19829
WEIGHT1: 25: 36666
2- Merge the foo and bar profiles with weight 3x and 5x respectively and verify the combined output
RUN: llvm-profdata merge --sample --text %p/Inputs/weight-sample-bar.proftext:3 %p/Inputs/weight-sample-foo.proftext:5 -o - | FileCheck %s --check-prefix=WEIGHT2
WEIGHT2: foo:8816440:176635
WEIGHT2: 7: 176635
WEIGHT2: 8: 176635
WEIGHT2: 9: 34650
WEIGHT2: 10: 146705
WEIGHT2: 11: 59530
WEIGHT2: 13: 90925 foo:97655
WEIGHT2: 15: 182290
WEIGHT2: bar:5316111:106110
WEIGHT2: 17: 106110
WEIGHT2: 18: 106110
WEIGHT2: 19: 21015
WEIGHT2: 20: 88221
WEIGHT2: 21: 36510
WEIGHT2: 23: 54450 bar:59487
WEIGHT2: 25: 109998
3- Bad merge: foo and bar profiles with invalid weights
RUN: not llvm-profdata merge --sample --text %p/Inputs/weight-sample-bar.proftext:3 %p/Inputs/weight-sample-foo.proftext:-5 -o %t.out 2>&1 | FileCheck %s --check-prefix=ERROR3
ERROR3: error: Input weight must be a positive integer.

View File

@ -12,6 +12,7 @@
//===----------------------------------------------------------------------===//
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/ProfileData/InstrProfReader.h"
@ -27,6 +28,7 @@
#include "llvm/Support/PrettyStackTrace.h"
#include "llvm/Support/Signals.h"
#include "llvm/Support/raw_ostream.h"
#include <tuple>
using namespace llvm;
@ -93,7 +95,17 @@ static void handleMergeWriterError(std::error_code &Error,
}
}
static void mergeInstrProfile(const cl::list<std::string> &Inputs,
// One input profile file paired with the multiplier applied to its counts
// during the merge. Filename is a non-owning StringRef: it must not outlive
// the cl::list<std::string> storage it points into.
struct WeightedFile {
StringRef Filename;
uint64_t Weight;
// NOTE(review): the default constructor leaves Weight uninitialized; only
// the two-argument form establishes a valid state.
WeightedFile() {}
WeightedFile(StringRef F, uint64_t W) : Filename{F}, Weight{W} {}
};
typedef SmallVector<WeightedFile, 5> WeightedFileVector;
static void mergeInstrProfile(const WeightedFileVector &Inputs,
StringRef OutputFilename,
ProfileFormat OutputFormat) {
if (OutputFilename.compare("-") == 0)
@ -109,21 +121,21 @@ static void mergeInstrProfile(const cl::list<std::string> &Inputs,
InstrProfWriter Writer;
SmallSet<std::error_code, 4> WriterErrorCodes;
for (const auto &Filename : Inputs) {
auto ReaderOrErr = InstrProfReader::create(Filename);
for (const auto &Input : Inputs) {
auto ReaderOrErr = InstrProfReader::create(Input.Filename);
if (std::error_code ec = ReaderOrErr.getError())
exitWithErrorCode(ec, Filename);
exitWithErrorCode(ec, Input.Filename);
auto Reader = std::move(ReaderOrErr.get());
for (auto &I : *Reader) {
if (std::error_code EC = Writer.addRecord(std::move(I))) {
if (std::error_code EC = Writer.addRecord(std::move(I), Input.Weight)) {
// Only show hint the first time an error occurs.
bool firstTime = WriterErrorCodes.insert(EC).second;
handleMergeWriterError(EC, Filename, I.Name, firstTime);
handleMergeWriterError(EC, Input.Filename, I.Name, firstTime);
}
}
if (Reader->hasError())
exitWithErrorCode(Reader->getError(), Filename);
exitWithErrorCode(Reader->getError(), Input.Filename);
}
if (OutputFormat == PF_Text)
Writer.writeText(Output);
@ -135,7 +147,7 @@ static sampleprof::SampleProfileFormat FormatMap[] = {
sampleprof::SPF_None, sampleprof::SPF_Text, sampleprof::SPF_Binary,
sampleprof::SPF_GCC};
static void mergeSampleProfile(const cl::list<std::string> &Inputs,
static void mergeSampleProfile(const WeightedFileVector &Inputs,
StringRef OutputFilename,
ProfileFormat OutputFormat) {
using namespace sampleprof;
@ -147,11 +159,11 @@ static void mergeSampleProfile(const cl::list<std::string> &Inputs,
auto Writer = std::move(WriterOrErr.get());
StringMap<FunctionSamples> ProfileMap;
SmallVector<std::unique_ptr<sampleprof::SampleProfileReader>, 5> Readers;
for (const auto &Filename : Inputs) {
for (const auto &Input : Inputs) {
auto ReaderOrErr =
SampleProfileReader::create(Filename, getGlobalContext());
SampleProfileReader::create(Input.Filename, getGlobalContext());
if (std::error_code EC = ReaderOrErr.getError())
exitWithErrorCode(EC, Filename);
exitWithErrorCode(EC, Input.Filename);
// We need to keep the readers around until after all the files are
// read so that we do not lose the function names stored in each
@ -160,7 +172,7 @@ static void mergeSampleProfile(const cl::list<std::string> &Inputs,
Readers.push_back(std::move(ReaderOrErr.get()));
const auto Reader = Readers.back().get();
if (std::error_code EC = Reader->read())
exitWithErrorCode(EC, Filename);
exitWithErrorCode(EC, Input.Filename);
StringMap<FunctionSamples> &Profiles = Reader->getProfiles();
for (StringMap<FunctionSamples>::iterator I = Profiles.begin(),
@ -168,15 +180,38 @@ static void mergeSampleProfile(const cl::list<std::string> &Inputs,
I != E; ++I) {
StringRef FName = I->first();
FunctionSamples &Samples = I->second;
ProfileMap[FName].merge(Samples);
ProfileMap[FName].merge(Samples, Input.Weight);
}
}
Writer->write(ProfileMap);
}
// Parse each raw "<filename>[:<weight>]" command-line input into a
// WeightedFile in \p WeightedInputs. A missing weight defaults to 1; a
// non-numeric or zero weight is a fatal error (exitWithError does not
// return).
static void parseInputFiles(const cl::list<std::string> &Inputs,
WeightedFileVector &WeightedInputs) {
WeightedInputs.reserve(Inputs.size());
for (StringRef Input : Inputs) {
StringRef FileName;
StringRef WeightStr;
// Split on the LAST ':' so only the trailing component is treated as a
// candidate weight.
std::tie(FileName, WeightStr) = Input.rsplit(':');
if (WeightStr.empty() || sys::fs::exists(Input)) {
// No weight specified or valid path containing delimiter.
// (If the whole string names an existing file, any ':' is part of the
// path — e.g. a Windows drive letter — so the weight defaults to 1.)
WeightedInputs.push_back(WeightedFile(Input, 1));
} else {
// Input weight specified.
uint64_t Weight;
// getAsInteger returns true on failure; a negative string like "-5"
// fails to parse into the unsigned Weight, and Weight < 1 rejects an
// explicit 0.
if (WeightStr.getAsInteger(10, Weight) || Weight < 1) {
// Invalid input weight.
exitWithError("Input weight must be a positive integer.");
}
WeightedInputs.push_back(WeightedFile(FileName, Weight));
}
}
}
static int merge_main(int argc, const char *argv[]) {
cl::list<std::string> Inputs(cl::Positional, cl::Required, cl::OneOrMore,
cl::desc("<filenames...>"));
cl::desc("<filename[:weight]...>"));
cl::opt<std::string> OutputFilename("output", cl::value_desc("output"),
cl::init("-"), cl::Required,
@ -198,10 +233,13 @@ static int merge_main(int argc, const char *argv[]) {
cl::ParseCommandLineOptions(argc, argv, "LLVM profile data merger\n");
WeightedFileVector WeightedInputs;
parseInputFiles(Inputs, WeightedInputs);
if (ProfileKind == instr)
mergeInstrProfile(Inputs, OutputFilename, OutputFormat);
mergeInstrProfile(WeightedInputs, OutputFilename, OutputFormat);
else
mergeSampleProfile(Inputs, OutputFilename, OutputFormat);
mergeSampleProfile(WeightedInputs, OutputFilename, OutputFormat);
return 0;
}

View File

@ -490,4 +490,24 @@ TEST_F(InstrProfTest, get_max_function_count) {
ASSERT_EQ(1ULL << 63, Reader->getMaximumFunctionCount());
}
// Verify that InstrProfWriter::addRecord applies the per-record weight:
// every counter read back equals the original count times its weight.
TEST_F(InstrProfTest, get_weighted_function_counts) {
// Same function name, different hashes -> stored as two distinct records.
InstrProfRecord Record1("foo", 0x1234, {1, 2});
InstrProfRecord Record2("foo", 0x1235, {3, 4});
Writer.addRecord(std::move(Record1), 3);
Writer.addRecord(std::move(Record2), 5);
auto Profile = Writer.writeBuffer();
readProfile(std::move(Profile));
std::vector<uint64_t> Counts;
// Record1 scaled by weight 3: {1*3, 2*3}.
ASSERT_TRUE(NoError(Reader->getFunctionCounts("foo", 0x1234, Counts)));
ASSERT_EQ(2U, Counts.size());
ASSERT_EQ(3U, Counts[0]);
ASSERT_EQ(6U, Counts[1]);
// Record2 scaled by weight 5: {3*5, 4*5}.
ASSERT_TRUE(NoError(Reader->getFunctionCounts("foo", 0x1235, Counts)));
ASSERT_EQ(2U, Counts.size());
ASSERT_EQ(15U, Counts[0]);
ASSERT_EQ(20U, Counts[1]);
}
} // end anonymous namespace