Commit 6b0dcd93 authored by niklas.baumgarten

Updated WelfordAggregate

parent e224c1dc
@@ -51,7 +51,7 @@ public:
std::vector<double> GetCostVector() const {
std::vector<double> costVector;
for (auto &&[level, entry]: _levelMap)
costVector.push_back(entry.Cost);
costVector.push_back(entry.C);
return costVector;
}
......
#ifndef MLMC_MC_HPP
#define MLMC_MC_HPP
#include "montecarlo/datastructure/EmpiricMeasures.hpp"
#include "montecarlo/datastructure/WelfordAggregate.hpp"
#include "pdesolver/PDESolverCreator.hpp"
......
@@ -7,11 +7,11 @@ void MonteCarloMap::UpdateSampleCounter(double epsilon) {
double factor = 0.0;
for (auto &[level, mc] : _levelMap)
factor += sqrt(mc.vars.Y * mc.avgs.Cost);
factor += sqrt(mc.aggregate.sVar.Y * mc.aggregate.mean.C);
for (auto &[level, mc] : _levelMap) {
optimalM = (int) (ceil(2 * pow(epsilon, -2) * factor *
sqrt(mc.vars.Y / mc.avgs.Cost)));
sqrt(mc.aggregate.sVar.Y / mc.aggregate.mean.C)));
if (optimalM == 1) optimalM++; // Hack
mc.aggregate.UpdateSampleCounter(optimalM - mc.aggregate.ctr.M);
}
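Note: this update computes the standard MLMC sample allocation. With per-level sample variance $V_\ell$ (here aggregate.sVar.Y) and mean cost $C_\ell$ (aggregate.mean.C), the code evaluates

$$M_\ell = \left\lceil 2\,\varepsilon^{-2}\,\sqrt{V_\ell / C_\ell}\,\sum_{k} \sqrt{V_k C_k} \right\rceil,$$

which is the allocation minimizing total cost $\sum_k M_k C_k$ subject to the sampling-error budget $\sum_k V_k / M_k \le \varepsilon^2 / 2$.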
@@ -20,12 +20,15 @@ void MonteCarloMap::UpdateSampleCounter(double epsilon) {
void MonteCarloMap::AppendLevel(double epsilon, Exponents exponents, int newLevel) {
if (newLevel <= maxLevel) {
auto oldLevel = this->rbegin();
double varsY = oldLevel->second.vars.Y / pow(2.0, exponents.beta);
double avgsCost = oldLevel->second.avgs.Cost * pow(2.0, exponents.gamma);
double sVarY = oldLevel->second.aggregate.sVar.Y / pow(2.0, exponents.beta);
double avgsCost = oldLevel->second.aggregate.mean.C * pow(2.0, exponents.gamma);
_levelMap.insert(
LevelMonteCarloPair(newLevel, MonteCarlo(newLevel, 0, false, true)));
_levelMap.find(newLevel)->second.vars.Y = varsY;
_levelMap.find(newLevel)->second.avgs.Cost = avgsCost;
{newLevel, MonteCarlo(newLevel, 0, false, true)}
);
_levelMap.find(newLevel)->second.aggregate.sVar.Y = sVarY;
_levelMap.find(newLevel)->second.aggregate.mean.C = avgsCost;
this->UpdateSampleCounter(epsilon);
} else {
Exit ("Maximum level has been reached.")
@@ -47,20 +50,20 @@ bool SampleCounterMap::NoSamplesLeft() {
return noSamplesLeft;
}
void AveragesMap::Update(const MonteCarloMap &mcMap) {
void MeanMap::Update(const MonteCarloMap &mcMap) {
for (auto &&[level, mc]: mcMap) {
_levelMap[level] = mc.avgs;
Q += mc.avgs.Y;
Cost += mc.avgs.Cost; // Todo at the right place?
_levelMap[level] = mc.aggregate.mean;
Q += mc.aggregate.mean.Y;
Cost += mc.aggregate.mean.C; // Todo at the right place?
}
}
void VariancesMap::Update(const MonteCarloMap &mcMap) {
void SVarMap::Update(const MonteCarloMap &mcMap) {
for (auto &[level, mc]: mcMap)
_levelMap[level] = mc.vars;
_levelMap[level] = mc.aggregate.sVar;
}
void KurtosisMap::Update(const MonteCarloMap &mcMap) {
for (auto &[level, mc]: mcMap)
_levelMap[level] = mc.kurtosis;
_levelMap[level] = mc.aggregate.kurtosis;
}
#ifndef EMPIRICMEASURELEVELMAPS_HPP
#define EMPIRICMEASURELEVELMAPS_HPP
#include "EmpiricMeasures.hpp"
#include "WelfordAggregate.hpp"
#include "basics/Level.hpp"
#include "montecarlo/MonteCarlo.hpp"
@@ -10,8 +9,6 @@
struct Exponents;
typedef std::pair<int, MonteCarlo> LevelMonteCarloPair;
struct MonteCarloMap : public LevelMap<MonteCarlo> {
int maxLevel = 10;
@@ -20,7 +17,7 @@ struct MonteCarloMap : public LevelMap<MonteCarlo> {
config.get("maxLevel", maxLevel);
};
MonteCarloMap(std::initializer_list<LevelMonteCarloPair> mcMap) :
MonteCarloMap(std::initializer_list<std::pair<int, MonteCarlo>> mcMap) :
LevelMap<MonteCarlo>(mcMap) {
config.get("maxLevel", maxLevel);
};
@@ -42,30 +39,30 @@ struct SampleCounterMap : public LevelMap<SampleCounter> {
bool NoSamplesLeft();
};
struct AveragesMap : public LevelMap<Averages> {
struct MeanMap : public LevelMap<Mean> {
double Q = 0.0;
double Cost = 0.0;
AveragesMap() {};
MeanMap() {};
AveragesMap(std::initializer_list<std::pair<int, Averages>> avgs) :
LevelMap<Averages>(avgs) {};
MeanMap(std::initializer_list<std::pair<int, Mean>> avgs) :
LevelMap<Mean>(avgs) {};
AveragesMap(const MonteCarloMap &mcMap) {
MeanMap(const MonteCarloMap &mcMap) {
Update(mcMap);
};
void Update(const MonteCarloMap &mcMap);
};
struct VariancesMap : public LevelMap<Variances> {
struct SVarMap : public LevelMap<SVar> {
VariancesMap() {};
SVarMap() {};
VariancesMap(std::initializer_list<std::pair<int, Variances>> vars) :
LevelMap<Variances>(vars) {};
SVarMap(std::initializer_list<std::pair<int, SVar>> vars) :
LevelMap<SVar>(vars) {};
VariancesMap(const MonteCarloMap &mcMap) {
SVarMap(const MonteCarloMap &mcMap) {
Update(mcMap);
};
@@ -88,8 +85,8 @@ struct KurtosisMap : LevelMap<Kurtosis> {
struct MultiLevelMonteCarloData {
SampleCounterMap ctrs;
AveragesMap avgs;
VariancesMap vars;
MeanMap avgs;
SVarMap vars;
KurtosisMap kurtosis;
MultiLevelMonteCarloData() {};
@@ -98,8 +95,8 @@ struct MultiLevelMonteCarloData {
ExtractDataFrom(mcMap);
}
MultiLevelMonteCarloData(const SampleCounterMap &ctrs, const AveragesMap &avgs,
const VariancesMap &vars, const KurtosisMap &kurtosis) :
MultiLevelMonteCarloData(const SampleCounterMap &ctrs, const MeanMap &avgs,
const SVarMap &vars, const KurtosisMap &kurtosis) :
ctrs(ctrs), avgs(avgs), vars(vars), kurtosis(kurtosis) {};
void ExtractDataFrom(const MonteCarloMap &mcMap);
......
@@ -17,7 +17,7 @@ double Errors::EstimateNumeric(const MultiLevelMonteCarloData &data) {
return EstimateNumeric(data.avgs);
}
double Errors::EstimateNumeric(const AveragesMap& avgs) {
double Errors::EstimateNumeric(const MeanMap& avgs) {
auto _levels = avgs.GetLevelVector();
auto _avgs = avgs.GetYVector();
double alpha = exponents.alpha;
@@ -39,7 +39,7 @@ double Errors::EstimateStochastic(const MultiLevelMonteCarloData &data) {
return EstimateStochastic(data.ctrs, data.vars);
}
double Errors::EstimateStochastic(const SampleCounterMap &ctrs, const VariancesMap &vars) {
double Errors::EstimateStochastic(const SampleCounterMap &ctrs, const SVarMap &vars) {
auto _ctrs = ctrs.GetMVector();
auto _vars = vars.GetYVector();
......
@@ -17,11 +17,11 @@ struct Errors {
double EstimateNumeric(const MultiLevelMonteCarloData &data);
double EstimateNumeric(const AveragesMap& avgs);
double EstimateNumeric(const MeanMap& avgs);
double EstimateStochastic(const MultiLevelMonteCarloData &data);
double EstimateStochastic(const SampleCounterMap &ctrs, const VariancesMap &vars);
double EstimateStochastic(const SampleCounterMap &ctrs, const SVarMap &vars);
};
#endif //ERRORS_HPP
@@ -8,13 +8,13 @@ void Exponents::ComputeExponents(const MultiLevelMonteCarloData &data) {
ComputeExponents(data.avgs, data.vars);
}
void Exponents::ComputeExponents(const AveragesMap &avgs, const VariancesMap &vars) {
alpha = ComputeAlpha(avgs);
beta = ComputeBeta(vars);
gamma = ComputeGamma(avgs);
void Exponents::ComputeExponents(const MeanMap &mean, const SVarMap &sVar) {
alpha = ComputeAlpha(mean);
beta = ComputeBeta(sVar);
gamma = ComputeGamma(mean);
}
double Exponents::ComputeAlpha(const AveragesMap &avgs) {
double Exponents::ComputeAlpha(const MeanMap &avgs) {
auto _levels = avgs.GetLevelVector();
auto _avgsY = avgs.GetYVector();
@@ -27,7 +27,7 @@ double Exponents::ComputeAlpha(const AveragesMap &avgs) {
return linearFit(levels, avgsY).first;
}
double Exponents::ComputeBeta(const VariancesMap &vars) {
double Exponents::ComputeBeta(const SVarMap &vars) {
auto _levels = vars.GetLevelVector();
auto _varsY = vars.GetYVector();
@@ -40,7 +40,7 @@ double Exponents::ComputeBeta(const VariancesMap &vars) {
return linearFit(levels, varsY).first;
}
double Exponents::ComputeGamma(const AveragesMap &avgs) {
double Exponents::ComputeGamma(const MeanMap &avgs) {
auto _levels = avgs.GetLevelVector();
auto _avgsCost = avgs.GetCostVector();
......
@@ -23,13 +23,13 @@ struct Exponents {
void ComputeExponents(const MonteCarloMap &mcMap);
void ComputeExponents(const AveragesMap &avgs, const VariancesMap &vars);
void ComputeExponents(const MeanMap &mean, const SVarMap &sVar);
double ComputeAlpha(const AveragesMap &avgs);
double ComputeAlpha(const MeanMap &avgs);
double ComputeBeta(const VariancesMap &vars);
double ComputeBeta(const SVarMap &vars);
double ComputeGamma(const AveragesMap &avgs);
double ComputeGamma(const MeanMap &avgs);
std::pair<double, double> linearFit(std::vector<double> &x, std::vector<double> &y);
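Note: these estimates appear to follow the usual weak/strong convergence model, with linearFit applied to base-2 logarithms of the level data:

$$|\mathbb{E}[Y_\ell]| \sim 2^{-\alpha \ell}, \qquad \mathbb{V}[Y_\ell] \sim 2^{-\beta \ell}, \qquad C_\ell \sim 2^{\gamma \ell}.$$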
......
@@ -20,40 +20,176 @@ struct SampleCounter {
}
void UpdateParallel(int commSplit) {
pout << "test" << endl;
M = PPM->SumOnCommSplit(Mcomm, 0) / PPM->Size(commSplit);
dM = PPM->SumOnCommSplit(dMcomm, 0) / PPM->Size(commSplit);
}
friend Logging &operator<<(Logging &s, const SampleCounter &ctr) {
return s << "M=" << ctr.M << " dM=" << ctr.dM
<< " MComm=" << ctr.Mcomm << " dMComm=" << ctr.dMcomm << endl;
return s << "M=" << ctr.M
<< " dM=" << ctr.dM
<< " MComm=" << ctr.Mcomm
<< " dMComm=" << ctr.dMcomm << endl;
}
};
struct Mean {
double C = 0.0;
double Q = 0.0;
double Y = 0.0;
double Qcomm = 0.0;
double Ycomm = 0.0;
double Ccomm = 0.0;
void Update(double dC, double dQ, double dY, SampleCounter ctr) {
Ccomm += dC / ctr.Mcomm;
Qcomm += dQ / ctr.Mcomm;
Ycomm += dY / ctr.Mcomm;
}
void UpdateParallel(SampleCounter ctr, int commSplit) {
C = abs(PPM->SumAcrossComm(ctr.Mcomm * Ccomm, commSplit) / ctr.M);
Q = abs(PPM->SumAcrossComm(ctr.Mcomm * Qcomm, commSplit) / ctr.M);
Y = abs(PPM->SumAcrossComm(ctr.Mcomm * Ycomm, commSplit) / ctr.M);
}
friend Logging &operator<<(Logging &s, const Mean &mean) {
return s << "MeanQ=" << mean.Q
<< " MeanY=" << mean.Y
<< " MeanC=" << mean.C
<< " MeanQcomm=" << mean.Qcomm
<< " MeanYcomm=" << mean.Ycomm
<< " MeanCcomm=" << mean.Ccomm << endl;
}
};
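Note: Mean::UpdateParallel reduces the per-communicator running means into the global one as a sample-size-weighted average, $\mu = \left|\tfrac{1}{M}\sum_r M_r^{\mathrm{comm}}\,\mu_r^{\mathrm{comm}}\right|$; the abs stores the magnitude, presumably for the log-based exponent fits.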
struct SVar {
double C = 0.0;
double Q = 0.0;
double Y = 0.0;
double Ccomm = 0.0;
double Qcomm = 0.0;
double Ycomm = 0.0;
void Update(double C2comm, double Q2comm, double Y2comm, SampleCounter ctr) {
Ccomm = C2comm / (ctr.Mcomm - 1);
Qcomm = Q2comm / (ctr.Mcomm - 1);
Ycomm = Y2comm / (ctr.Mcomm - 1);
}
void UpdateParallel(int commSplit) {
}
friend Logging &operator<<(Logging &s, const SVar &sVar) {
return s << "SVarQ=" << sVar.Q
<< " SVarY=" << sVar.Y
<< " SVarC=" << sVar.C
<< " SVarQcomm=" << sVar.Qcomm
<< " SVarYcomm=" << sVar.Ycomm
<< " SVarCcomm=" << sVar.Ccomm << endl;
}
};
struct Skewness {
double C = 0.0;
double Q = 0.0;
double Y = 0.0;
double Ccomm = 0.0;
double Qcomm = 0.0;
double Ycomm = 0.0;
void Update() {
}
void UpdateParallel(int commSplit) {
}
friend Logging &operator<<(Logging &s, const Skewness &skewness) {
return s << "S[Q]=" << skewness.Q << " S[Y]=" << skewness.Y << endl;
}
};
struct Kurtosis {
double C = 0.0;
double Q = 0.0;
double Y = 0.0;
double Ccomm = 0.0;
double Qcomm = 0.0;
double Ycomm = 0.0;
void Update() { // previously: Update(Averages avgs, Variances vars)
// Q = (avgs.Q4 - 4.0 * avgs.Q3 * avgs.Q + 6.0 * avgs.Q2 * avgs.Q * avgs.Q -
// 3.0 * avgs.Q * avgs.Q * avgs.Q * avgs.Q) / vars.Q / vars.Q;
// Y = (avgs.Y4 - 4.0 * avgs.Y3 * avgs.Y + 6.0 * avgs.Y2 * avgs.Y * avgs.Y -
// 3.0 * avgs.Y * avgs.Y * avgs.Y * avgs.Y) / vars.Y / vars.Y;
// if (Y > 100.0) Warning("Kurtosis of Y above 100!")
}
void UpdateParallel(int commSplit) {
}
friend Logging &operator<<(Logging &s, const Kurtosis &kurtosis) {
return s << "K[Q]=" << kurtosis.Q << " K[Y]=" << kurtosis.Y << endl;
}
};
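Note: the commented-out estimator is the standard moment expansion of the kurtosis; for Q with mean $\mu = \mathbb{E}[Q]$ and variance $\sigma^2$,

$$\mathrm{Kurt}[Q] = \frac{\mathbb{E}[Q^4] - 4\,\mathbb{E}[Q^3]\,\mu + 6\,\mathbb{E}[Q^2]\,\mu^2 - 3\,\mu^4}{\sigma^4}.$$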
class WelfordAggregate {
private:
double C2comm = 0.0;
double Q2comm = 0.0;
double Y2comm = 0.0;
double C3comm = 0.0;
double Q3comm = 0.0;
double Y3comm = 0.0;
double C4comm = 0.0;
double Q4comm = 0.0;
double Y4comm = 0.0;
public:
SampleCounter ctr;
Mean mean;
double MeanQcomm = 0.0;
double Q2comm = 0.0;
double SVarQcomm = 0.0;
SVar sVar;
double MeanYcomm = 0.0;
double Y2comm = 0.0;
double SVarYcomm = 0.0;
Skewness skewness;
Kurtosis kurtosis;
double C2 = 0.0;
double MeanQ = 0.0;
double Q2 = 0.0;
double SVarQ = 0.0;
double MeanY = 0.0;
double Y2 = 0.0;
double SVarY = 0.0;
int commSplit = 0;
double Cost = 0.0;
bool parallel = true;
@@ -68,35 +204,40 @@ public:
void Update(double newC, double newQ, double newY) {
ctr.Update();
double dCost = newC - Cost;
Cost += dCost / ctr.Mcomm;
double dC = newC - mean.Ccomm;
double dQ = newQ - mean.Qcomm;
double dY = newY - mean.Ycomm;
double dQ = newQ - MeanQcomm;
MeanQcomm += dQ / ctr.Mcomm;
double dQ2 = newQ - MeanQcomm;
Q2comm += dQ * dQ2;
SVarQcomm = Q2comm / (ctr.Mcomm - 1);
mean.Update(dC, dQ, dY, ctr);
double dY = newY - MeanYcomm;
MeanYcomm += dY / ctr.Mcomm;
double dY2 = newY - MeanYcomm;
double dC2 = newC - mean.Ccomm;
double dQ2 = newQ - mean.Qcomm;
double dY2 = newY - mean.Ycomm;
C2comm += dC * dC2;
Q2comm += dQ * dQ2;
Y2comm += dY * dY2;
SVarYcomm = Y2comm / (ctr.Mcomm - 1);
sVar.Update(C2comm, Q2comm, Y2comm, ctr);
}
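Note: this is the textbook single-pass Welford step, applied to C, Q and Y at once; for one quantity $x$ with running mean $\mu$ and squared-deviation sum $M_2$,

$$\delta = x_n - \mu_{n-1}, \qquad \mu_n = \mu_{n-1} + \delta / n, \qquad M_{2,n} = M_{2,n-1} + \delta\,(x_n - \mu_n), \qquad s_n^2 = M_{2,n} / (n - 1).$$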
void UpdateParallel() {
ctr.UpdateParallel(commSplit);
mean.UpdateParallel(ctr, commSplit);
C2 = PPM->SumAcrossComm(C2comm, commSplit);
Q2 = PPM->SumAcrossComm(Q2comm, commSplit);
Y2 = PPM->SumAcrossComm(Y2comm, commSplit);
MeanQ =
(PPM->SumOnCommSplit(ctr.Mcomm * MeanQcomm, 0) / PPM->Size(commSplit)) / ctr.M;
MeanY =
(PPM->SumOnCommSplit(ctr.Mcomm * MeanYcomm, 0) / PPM->Size(commSplit)) / ctr.M;
// delta = avg_b - avg_a
// M2 = M2_a + M2_b + delta ** 2 * n_a * n_b / n
Q2 = PPM->SumOnCommSplit(Q2, 0) / PPM->Size(commSplit);
Y2 = PPM->SumOnCommSplit(Y2, 0) / PPM->Size(commSplit);
sVar.C = C2 / (ctr.M - 1);
sVar.Q = Q2 / (ctr.M - 1);
sVar.Y = Y2 / (ctr.M - 1);
// def parallel_variance(n_a, avg_a, M2_a, n_b, avg_b, M2_b):
// n = n_a + n_b
// n = n_a + n_b <- ctr.UpdateParallel
// delta = avg_b - avg_a
// M2 = M2_a + M2_b + delta ** 2 * n_a * n_b / n
// var_ab = M2 / (n - 1)
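Note: the quoted recipe is the pairwise combination of Chan et al. A minimal self-contained C++ sketch (the Partial type and Combine helper are illustrative only, not part of this commit), including the delta**2 cross-term the comment refers to:

// Hypothetical illustration of the parallel-variance formula quoted above;
// not part of this commit.
struct Partial {
  long n = 0;        // sample count n_a
  double mean = 0.0; // running mean avg_a
  double M2 = 0.0;   // sum of squared deviations M2_a
};

// Combine two Welford partials; var_ab = result.M2 / (result.n - 1).
Partial Combine(const Partial &a, const Partial &b) {
  Partial ab;
  ab.n = a.n + b.n;                                      // n = n_a + n_b
  double delta = b.mean - a.mean;                        // delta = avg_b - avg_a
  ab.mean = a.mean + delta * double(b.n) / double(ab.n); // weighted mean
  ab.M2 = a.M2 + b.M2 + delta * delta * double(a.n) * double(b.n) / double(ab.n);
  return ab;
}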
@@ -125,12 +266,7 @@ public:
}
friend Logging &operator<<(Logging &s, const WelfordAggregate &aggregate) {
return s << "M=" << aggregate.ctr.M
<< " dM=" << aggregate.ctr.dM
<< " Mcomm=" << aggregate.ctr.Mcomm
<< " dMcomm=" << aggregate.ctr.dMcomm << endl
<< "MeanQ=" << aggregate.MeanQ << " MeanY=" << aggregate.MeanY
<< " SVarQ=" << aggregate.SVarQ << " SVarY=" << aggregate.SVarY << endl;
return s << aggregate.ctr << aggregate.mean << aggregate.sVar;
}
};
......
@@ -10,15 +10,15 @@ INSTANTIATE_TEST_SUITE_P(TestMonteCarlo, TestMonteCarloParallelInterval, Values(
));
TEST_P(TestMonteCarloSeriellInterval, TestMethodOnlyFine) {
EXPECT_NEAR(mc.aggregate.MeanQ, 0.0, sqrt(1.0 / dM));
EXPECT_NEAR(mc.aggregate.MeanY, 0.0, sqrt(1.0 / dM));
EXPECT_NEAR(mc.aggregate.mean.Q, 0.0, sqrt(1.0 / dM));
EXPECT_NEAR(mc.aggregate.mean.Y, 0.0, sqrt(1.0 / dM));
// EXPECT_NEAR(mc.aggregate.SVarQ, 1.0, sqrt(10.0 / dM));
// EXPECT_NEAR(mc.aggregate.SVarY, 1.0, sqrt(10.0 / dM));
}
TEST_P(TestMonteCarloParallelInterval, TestMethodOnlyFine) {
EXPECT_NEAR(mc.aggregate.MeanQ, 0.0, sqrt(1.0 / dM));
EXPECT_NEAR(mc.aggregate.MeanY, 0.0, sqrt(1.0 / dM));
EXPECT_NEAR(mc.aggregate.mean.Q, 0.0, sqrt(1.0 / dM));
EXPECT_NEAR(mc.aggregate.mean.Y, 0.0, sqrt(1.0 / dM));
// EXPECT_NEAR(mc.aggregate.SVarQ, 1.0, sqrt(10.0 / dM));
// EXPECT_NEAR(mc.aggregate.SVarY, 1.0, sqrt(10.0 / dM));
}
......
@@ -20,26 +20,18 @@ struct TestData {
{6, SampleCounter{25, 0, 0, 0}}
};
AveragesMap _avgs = {
{3, Averages(2.0e-02, 4.0e-04, 8.0e-06, 16.0e-08,
2.0e-02, 4.0e-04, 8.0e-06, 16.0e-08,
400)},
{4, Averages(1.0e-02, 2.0e-04, 4.0e-06, 8.0e-08,
1.0e-02, 2.0e-04, 4.0e-06, 8.0e-08,
800)},
{5, Averages(0.5e-02, 1.0e-04, 2.0e-06, 4.0e-08,
0.5e-02, 1.0e-04, 2.0e-06, 4.0e-08,
1600)},
{6, Averages(0.25e-02, 0.5e-04, 1.0e-06, 2.0e-08,
0.25e-02, 0.5e-04, 1.0e-06, 2.0e-08,
3200)}
MeanMap _means = {
{3, Mean{400, 2.0e-02, 2.0e-02}},
{4, Mean{800, 1.0e-02, 1.0e-02}},
{5, Mean{1600, 0.5e-02, 0.5e-02}},
{6, Mean{3200, 0.25e-02, 0.25e-02}}
};
VariancesMap _vars = {
{3, Variances(64.0e-02, 64.0e-02)},
{4, Variances(16.0e-02, 16.0e-02)},
{5, Variances(4.0e-02, 4.0e-02)},
{6, Variances(1.0e-02, 1.0e-02)}
SVarMap _sVars = {
{3, SVar{0.0, 64.0e-02, 64.0e-02}},
{4, SVar{0.0, 16.0e-02, 16.0e-02}},
{5, SVar{0.0, 4.0e-02, 4.0e-02}},
{6, SVar{0.0, 1.0e-02, 1.0e-02}}
};
KurtosisMap _kurtosis = {
@@ -49,14 +41,14 @@
{6, Kurtosis()}
};
const MultiLevelMonteCarloData _data{_ctrs, _avgs, _vars, _kurtosis};
const MultiLevelMonteCarloData _data{_ctrs, _means, _sVars, _kurtosis};