Commit c4a050a3 authored by niklas.baumgarten

Merge branch '22-remove-counter-sums-averages-variances-kurtosus-objects' into 'feature'

Resolve "Remove Counter Sums Averages Variances Kurtosus Objects"

Closes #22

See merge request mpp/mlmc!29
parents 95df32be ca6aa4ce
Pipeline #149592 passed with stages in 57 minutes and 35 seconds
@@ -51,7 +51,7 @@ public:
std::vector<double> GetCostVector() const {
std::vector<double> costVector;
for (auto &&[level, entry]: _levelMap)
costVector.push_back(entry.Cost);
costVector.push_back(entry.C);
return costVector;
}
......
@@ -3,7 +3,7 @@ add_library(MONTECARLO STATIC
MultilevelMonteCarlo.cpp
datastructure/Errors.cpp
datastructure/Exponents.cpp
datastructure/EmpiricMeasures.cpp
datastructure/WelfordAggregate.cpp
datastructure/EmpiricMeasureLevelMaps.cpp
)
target_link_libraries(MONTECARLO PDESOLVER MPP_LIBRARIES)
@@ -6,30 +6,19 @@ void MonteCarlo::Method() {
vout(1) << "Start with: " << aggregate;
method();
aggregate.UpdateParallel();
vout(2) << sums;
avgs.Update(sums, aggregate.M);
vars.Update(avgs);
kurtosis.Update(avgs, vars);
aggregate.SVarQ = vars.Q;
aggregate.SVarY = vars.Y;
vout(1) << "End with: " << aggregate;
mout.EndBlock(verbose == 0, "Took");
}
void MonteCarlo::method() {
SampleSolution coarseSolution(pdeSolver->GetDisc(), coarseId);
SampleSolution fineSolution(pdeSolver->GetDisc(), fineId);
while (aggregate.dMcomm != 0) {
computeSampleSolution(aggregate.index(), fineId, fineSolution);
if (onlyFine) coarseSolution.Init();
else computeSampleSolution(aggregate.index(), coarseId, coarseSolution);
aggregate.Update(fineSolution, coarseSolution);
sums.Update(fineSolution, coarseSolution); // todo rmv
vout(3) << sums; // todo rmv
SampleSolution cSolution(pdeSolver->GetDisc(), cId);
SampleSolution fSolution(pdeSolver->GetDisc(), fId);
while (aggregate.ctr.dMcomm != 0) {
computeSampleSolution(aggregate.index(), fId, fSolution);
if (onlyFine) cSolution.Init();
else computeSampleSolution(aggregate.index(), cId, cSolution);
aggregate.Update(fSolution, cSolution);
}
}
......
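The refactored loop feeds every sample pair directly into the running WelfordAggregate instead of first accumulating raw power sums in Sums. The per-sample recurrence behind aggregate.Update (standard Welford, written here for one statistic) is

\[ \bar{x}_n = \bar{x}_{n-1} + \frac{x_n - \bar{x}_{n-1}}{n}, \qquad M_{2,n} = M_{2,n-1} + (x_n - \bar{x}_{n-1})(x_n - \bar{x}_n), \qquad s_n^2 = \frac{M_{2,n}}{n-1}, \]

applied once for Q and once for the level difference Y = Q_f - Q_c.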
#ifndef MLMC_MC_HPP
#define MLMC_MC_HPP
#include "montecarlo/datastructure/EmpiricMeasures.hpp"
#include "montecarlo/datastructure/WelfordAggregate.hpp"
#include "pdesolver/PDESolverCreator.hpp"
#include "mesh/MeshesCreator.hpp"
@@ -13,36 +13,26 @@ protected:
int plotting = 0;
int level;
bool onlyFine;
bool parallel;
int level;
Meshes *meshes;
PDESolver *pdeSolver;
public:
WelfordAggregate aggregate;
SampleCounter ctr; // todo remove, is still here for test
Sums sums;
Averages avgs;
Variances vars;
Kurtosis kurtosis;
PDESolverCreator pdeSolverCreator;
MeshesCreator meshesCreator;
SampleID coarseId;
WelfordAggregate aggregate;
SampleID cId;
SampleID fineId;
SampleID fId;
MonteCarlo(int level, int dM, bool onlyFine, bool parallel) :
level(level), onlyFine(onlyFine), parallel(parallel),
@@ -64,14 +54,12 @@ public:
config.get("MCVerbose", verbose);
config.get("MCParallel", parallel);
fineId.fLevel = level;
fineId.cLevel = level - 1;
fineId.coarse = false;
coarseId.fLevel = level;
coarseId.cLevel = level - 1;
coarseId.coarse = true;
ctr.parallel = parallel; // todo rmv
ctr.UpdateSampleCounter(dM); // todo rmv
fId.fLevel = level;
fId.cLevel = level - 1;
fId.coarse = false;
cId.fLevel = level;
cId.cLevel = level - 1;
cId.coarse = true;
aggregate.parallel = parallel;
aggregate.UpdateSampleCounter(dM);
......
@@ -36,7 +36,7 @@ void MultilevelMonteCarlo::adaptiveMethod() {
bool converged = false;
while (!converged) {
for (auto &[level, mc] : mcMap)
if (mc.aggregate.dM > 0)
if (mc.aggregate.ctr.dM > 0)
mc.Method();
mcMap.UpdateSampleCounter(epsilon);
......
@@ -7,26 +7,28 @@ void MonteCarloMap::UpdateSampleCounter(double epsilon) {
double factor = 0.0;
for (auto &[level, mc] : _levelMap)
factor += sqrt(mc.vars.Y * mc.avgs.Cost);
factor += sqrt(mc.aggregate.sVar.Y * mc.aggregate.mean.C);
for (auto &[level, mc] : _levelMap) {
optimalM = (int) (ceil(2 * pow(epsilon, -2) * factor *
sqrt(mc.vars.Y / mc.avgs.Cost)));
sqrt(mc.aggregate.sVar.Y / mc.aggregate.mean.C)));
if (optimalM == 1) optimalM++; // Hack
mc.aggregate.UpdateSampleCounter(optimalM - mc.aggregate.M);
mc.ctr.UpdateSampleCounter(optimalM - mc.ctr.M);
mc.aggregate.UpdateSampleCounter(optimalM - mc.aggregate.ctr.M);
}
}
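For orientation, the update above is the usual MLMC work-optimal sample allocation, with the per-level variance V_l read from mc.aggregate.sVar.Y and the per-level cost C_l from mc.aggregate.mean.C:

\[ M_\ell^{\mathrm{opt}} \;=\; \Big\lceil\, 2\,\varepsilon^{-2}\,\sqrt{V_\ell / C_\ell}\, \sum_k \sqrt{V_k C_k} \,\Big\rceil, \]

after which UpdateSampleCounter requests the remaining dM = M_l^opt - M_l samples on each level.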
void MonteCarloMap::AppendLevel(double epsilon, Exponents exponents, int newLevel) {
if (newLevel <= maxLevel) {
auto oldLevel = this->rbegin();
double varsY = oldLevel->second.vars.Y / pow(2.0, exponents.beta);
double avgsCost = oldLevel->second.avgs.Cost * pow(2.0, exponents.gamma);
double sVarY = oldLevel->second.aggregate.sVar.Y / pow(2.0, exponents.beta);
double avgsCost = oldLevel->second.aggregate.mean.C * pow(2.0, exponents.gamma);
_levelMap.insert(
LevelMonteCarloPair(newLevel, MonteCarlo(newLevel, 0, false, true)));
_levelMap.find(newLevel)->second.vars.Y = varsY;
_levelMap.find(newLevel)->second.avgs.Cost = avgsCost;
{newLevel, MonteCarlo(newLevel, 0, false, true)}
);
_levelMap.find(newLevel)->second.aggregate.sVar.Y = sVarY;
_levelMap.find(newLevel)->second.aggregate.mean.C = avgsCost;
this->UpdateSampleCounter(epsilon);
} else {
Exit ("Maximum level has been reached.")
@@ -35,7 +37,7 @@ void MonteCarloMap::AppendLevel(double epsilon, Exponents exponents, int newLeve
void SampleCounterMap::Update(const MonteCarloMap &mcMap) {
for (auto &[level, mc]: mcMap)
_levelMap[level] = SampleCounter(mc.aggregate);
_levelMap[level] = SampleCounter(mc.aggregate.ctr);
}
bool SampleCounterMap::NoSamplesLeft() {
@@ -48,20 +50,20 @@ bool SampleCounterMap::NoSamplesLeft() {
return noSamplesLeft;
}
void AveragesMap::Update(const MonteCarloMap &mcMap) {
void MeanMap::Update(const MonteCarloMap &mcMap) {
for (auto &&[level, mc]: mcMap) {
_levelMap[level] = mc.avgs;
Q += mc.avgs.Y;
Cost += mc.avgs.Cost; // Todo at the right place?
_levelMap[level] = mc.aggregate.mean;
Q += mc.aggregate.mean.Y;
Cost += mc.aggregate.mean.C; // Todo at the right place?
}
}
void VariancesMap::Update(const MonteCarloMap &mcMap) {
void SVarMap::Update(const MonteCarloMap &mcMap) {
for (auto &[level, mc]: mcMap)
_levelMap[level] = mc.vars;
_levelMap[level] = mc.aggregate.sVar;
}
void KurtosisMap::Update(const MonteCarloMap &mcMap) {
for (auto &[level, mc]: mcMap)
_levelMap[level] = mc.kurtosis;
_levelMap[level] = mc.aggregate.kurtosis;
}
#ifndef EMPIRICMEASURELEVELMAPS_HPP
#define EMPIRICMEASURELEVELMAPS_HPP
#include "EmpiricMeasures.hpp"
#include "WelfordAggregate.hpp"
#include "basics/Level.hpp"
#include "montecarlo/MonteCarlo.hpp"
#include <math.h>
@@ -9,8 +9,6 @@
struct Exponents;
typedef std::pair<int, MonteCarlo> LevelMonteCarloPair;
struct MonteCarloMap : public LevelMap<MonteCarlo> {
int maxLevel = 10;
@@ -19,7 +17,7 @@ struct MonteCarloMap : public LevelMap<MonteCarlo> {
config.get("maxLevel", maxLevel);
};
MonteCarloMap(std::initializer_list<LevelMonteCarloPair> mcMap) :
MonteCarloMap(std::initializer_list<std::pair<int, MonteCarlo>> mcMap) :
LevelMap<MonteCarlo>(mcMap) {
config.get("maxLevel", maxLevel);
};
@@ -41,30 +39,30 @@ struct SampleCounterMap : public LevelMap<SampleCounter> {
bool NoSamplesLeft();
};
struct AveragesMap : public LevelMap<Averages> {
struct MeanMap : public LevelMap<Mean> {
double Q = 0.0;
double Cost = 0.0;
AveragesMap() {};
MeanMap() {};
AveragesMap(std::initializer_list<std::pair<int, Averages>> avgs) :
LevelMap<Averages>(avgs) {};
MeanMap(std::initializer_list<std::pair<int, Mean>> avgs) :
LevelMap<Mean>(avgs) {};
AveragesMap(const MonteCarloMap &mcMap) {
MeanMap(const MonteCarloMap &mcMap) {
Update(mcMap);
};
void Update(const MonteCarloMap &mcMap);
};
struct VariancesMap : public LevelMap<Variances> {
struct SVarMap : public LevelMap<SVar> {
VariancesMap() {};
SVarMap() {};
VariancesMap(std::initializer_list<std::pair<int, Variances>> vars) :
LevelMap<Variances>(vars) {};
SVarMap(std::initializer_list<std::pair<int, SVar>> vars) :
LevelMap<SVar>(vars) {};
VariancesMap(const MonteCarloMap &mcMap) {
SVarMap(const MonteCarloMap &mcMap) {
Update(mcMap);
};
@@ -87,8 +85,8 @@ struct KurtosisMap : LevelMap<Kurtosis> {
struct MultiLevelMonteCarloData {
SampleCounterMap ctrs;
AveragesMap avgs;
VariancesMap vars;
MeanMap avgs;
SVarMap vars;
KurtosisMap kurtosis;
MultiLevelMonteCarloData() {};
@@ -97,8 +95,8 @@ struct MultiLevelMonteCarloData {
ExtractDataFrom(mcMap);
}
MultiLevelMonteCarloData(const SampleCounterMap &ctrs, const AveragesMap &avgs,
const VariancesMap &vars, const KurtosisMap &kurtosis) :
MultiLevelMonteCarloData(const SampleCounterMap &ctrs, const MeanMap &avgs,
const SVarMap &vars, const KurtosisMap &kurtosis) :
ctrs(ctrs), avgs(avgs), vars(vars), kurtosis(kurtosis) {};
void ExtractDataFrom(const MonteCarloMap &mcMap);
......
#include "EmpiricMeasures.hpp"
#include "utility/Assertion.hpp"
void Sums::Update(SampleSolution &fineSolution, SampleSolution &coarseSolution) {
Cost += fineSolution.Cost;
double Qf = fineSolution.Q;
double Qc = coarseSolution.Q;
double diffQ = Qf - Qc;
Q += Qf;
Q2 += Qf * Qf;
Q3 += Qf * Qf * Qf;
Q4 += Qf * Qf * Qf * Qf;
Y += diffQ;
Y2 += diffQ * diffQ;
Y3 += diffQ * diffQ * diffQ;
Y4 += diffQ * diffQ * diffQ * diffQ;
}
void Averages::Update(Sums sums, int M) {
Cost = sums.Cost / M;
Y = abs(sums.Y / M);
Y2 = sums.Y2 / M;
Y3 = sums.Y3 / M;
Y4 = sums.Y4 / M;
Q = abs(sums.Q / M);
Q2 = sums.Q2 / M;
Q3 = sums.Q3 / M;
Q4 = sums.Q4 / M;
}
void Variances::Update(Averages avgs) {
Y = fmax(avgs.Y2 - avgs.Y * avgs.Y, 1e-10);
Q = fmax(avgs.Q2 - avgs.Q * avgs.Q, 1e-10);
}
void Kurtosis::Update(Averages avgs, Variances vars) {
Q = (avgs.Q4 - 4.0 * avgs.Q3 * avgs.Q + 6.0 * avgs.Q2 * avgs.Q * avgs.Q -
3.0 * avgs.Q * avgs.Q * avgs.Q * avgs.Q) / vars.Q / vars.Q;
Y = (avgs.Y4 - 4.0 * avgs.Y3 * avgs.Y + 6.0 * avgs.Y2 * avgs.Y * avgs.Y -
3.0 * avgs.Y * avgs.Y * avgs.Y * avgs.Y) / vars.Y / vars.Y;
if (Y > 100.0) Warning("Kurtosis of Y above 100!")
}
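The quotient computed above is the fourth central moment expanded in raw sample moments, normalized by the squared sample variance,

\[ \kappa[Y] \;=\; \frac{\overline{Y^4} - 4\,\overline{Y^3}\,\overline{Y} + 6\,\overline{Y^2}\,\overline{Y}^{\,2} - 3\,\overline{Y}^{\,4}}{V[Y]^2}, \]

and likewise for Q; a large \kappa[Y] means the sample variance of Y is itself hard to estimate reliably, which is presumably what the warning above 100 flags.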
void SampleCounter::UpdateSampleCounter(int dM) {
this->dM = dM;
if (parallel) {
for (int i = 0; i < ceil(log2(PPM->Size(0))) + 1; i++) {
if (dM >= PPM->Size(i)) {
dMComm = ceil((double) dM / PPM->Size(i));
commSplit = PPM->MaxCommSplit() - i;
break;
}
}
} else {
dMComm = dM;
commSplit = 0;
}
}
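In the parallel branch the loop picks the smallest i for which PPM->Size(i) still fits into the requested dM and then splits the work as dMComm = ceil(dM / PPM->Size(i)) with commSplit = MaxCommSplit - i; for instance (illustrative numbers, not from this merge request), dM = 10 with PPM->Size(i) = 4 gives dMComm = ceil(10/4) = 3. Without parallelism the whole batch stays on one communicator, dMComm = dM.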
#ifndef EMPIRICMEASURES_HPP
#define EMPIRICMEASURES_HPP
#include "basics/Sample.hpp"
#include "Parallel.hpp"
struct Sums {
double Q = 0.0;
double Q2 = 0.0;
double Q3 = 0.0;
double Q4 = 0.0;
double Y = 0.0;
double Y2 = 0.0;
double Y3 = 0.0;
double Y4 = 0.0;
double Cost = 0.0;
  // Todo: Vector U, Vector V; be careful with instabilities of the difference
Sums() {};
Sums(double Q, double Q2, double Q3, double Q4,
double Y, double Y2, double Y3, double Y4, double Cost) :
Q(Q), Q2(Q2), Q3(Q3), Q4(Q4), Y(Y), Y2(Y2), Y3(Y3), Y4(Y4), Cost(Cost) {}
void Update(SampleSolution &fineSolution, SampleSolution &coarseSolution);
friend Logging &operator<<(Logging &s, const Sums &sums) {
return s << "Sum(Q)=" << sums.Q << " Sum(Q^2)=" << sums.Q2
<< " Sum(Q^3)=" << sums.Q3 << " Sum(Q^4)=" << sums.Q4 << endl
<< "Sum(Y)=" << sums.Y << " Sum(Y^2)=" << sums.Y2
<< " Sum(Y^3)=" << sums.Y3 << " Sum(Y^4)=" << sums.Y4 << endl;
}
};
struct Averages {
double Q = 0.0;
double Q2 = 0.0;
double Q3 = 0.0;
double Q4 = 0.0;
double Y = 0.0;
double Y2 = 0.0;
double Y3 = 0.0;
double Y4 = 0.0;
double Cost = 0.0;
Averages() {};
Averages(double Q, double Q2, double Q3, double Q4,
double Y, double Y2, double Y3, double Y4, double Cost) :
Q(Q), Q2(Q2), Q3(Q3), Q4(Q4), Y(Y), Y2(Y2), Y3(Y3), Y4(Y4), Cost(Cost) {}
void Update(Sums sums, int M);
friend Logging &operator<<(Logging &s, const Averages &avgs) {
return s << "|E[Q]|=" << avgs.Q << " |E[Y]|=" << avgs.Y << endl;
}
};
struct Variances {
double Q = 0.0;
double Y = 0.0;
Variances() {};
Variances(double Q, double Y) : Q(Q), Y(Y) {}
void Update(Averages avgs);
friend Logging &operator<<(Logging &s, const Variances &vars) {
return s << "V[Q]=" << vars.Q << " V[Y]=" << vars.Y << endl;
}
};
struct Kurtosis {
double Q = 0.0;
double Y = 0.0;
void Update(Averages avgs, Variances vars);
friend Logging &operator<<(Logging &s, const Kurtosis &kurtosis) {
return s << "K[Q]=" << kurtosis.Q << " K[Y]=" << kurtosis.Y << endl;
}
};
struct WelfordAggregate { // todo make class
int Mcomm = 0;
int dMcomm = 0;
double MeanQcomm = 0.0;
double Q2comm = 0.0;
double SVarQcomm = 0.0;
double MeanYcomm = 0.0;
double Y2comm = 0.0;
double SVarYcomm = 0.0;
double MeanQ = 0.0;
double Q2 = 0.0;
double SVarQ = 0.0;
double MeanY = 0.0;
double Y2 = 0.0;
double SVarY = 0.0;
int dM = 0;
int M = 0;
int commSplit = 0;
double Cost = 0.0;
bool parallel = true;
public:
void Update(const SampleSolution &fineSolution, const SampleSolution &coarseSolution) {
double newC = fineSolution.Cost;
double newQ = fineSolution.Q;
double newY = fineSolution.Q - coarseSolution.Q;
Update(newC, newQ, newY);
}
void Update(double newC, double newQ, double newY) {
Mcomm++;
dMcomm--;
double dCost = newC - Cost;
Cost += dCost / Mcomm;
double dQ = newQ - MeanQcomm;
MeanQcomm += dQ / Mcomm;
double dQ2 = newQ - MeanQcomm;
Q2comm += dQ * dQ2;
SVarQcomm = Q2comm / (Mcomm - 1);
double dY = newY - MeanYcomm;
MeanYcomm += dY / Mcomm;
double dY2 = newY - MeanYcomm;
Y2comm += dY * dY2;
SVarYcomm = Y2comm / (Mcomm - 1);
}
void UpdateParallel() {
M = PPM->SumOnCommSplit(Mcomm, 0) / PPM->Size(commSplit);
dM = PPM->SumOnCommSplit(dMcomm, 0) / PPM->Size(commSplit);
MeanQ = (PPM->SumOnCommSplit(Mcomm * MeanQcomm, 0) / PPM->Size(commSplit)) / M;
MeanY = (PPM->SumOnCommSplit(Mcomm * MeanYcomm, 0) / PPM->Size(commSplit)) / M;
Q2 = PPM->SumOnCommSplit(Q2, 0) / PPM->Size(commSplit);
Y2 = PPM->SumOnCommSplit(Y2, 0) / PPM->Size(commSplit);
// def parallel_variance(n_a, avg_a, M2_a, n_b, avg_b, M2_b):
// n = n_a + n_b
// delta = avg_b - avg_a
// M2 = M2_a + M2_b + delta ** 2 * n_a * n_b / n
// var_ab = M2 / (n - 1)
// return var_ab
}
int index() {
return M + Mcomm + dMcomm * PPM->Proc(0);
// return M + dMcomm * PPM->Proc(0);
}
void UpdateSampleCounter(int dM) {
this->dM = dM;
if (parallel) {
for (int i = 0; i < ceil(log2(PPM->Size(0))) + 1; i++) {
if (dM >= PPM->Size(i)) {
dMcomm = ceil((double) dM / PPM->Size(i));
commSplit = PPM->MaxCommSplit() - i;
break;
}
}
} else {
dMcomm = dM;
commSplit = 0;
}
}
friend Logging &operator<<(Logging &s, const WelfordAggregate &aggregate) {
return s << "M=" << aggregate.M << " dM=" << aggregate.dM
<< " Mcomm=" << aggregate.Mcomm << " dMcomm=" << aggregate.dMcomm << endl
<< "MeanQ=" << aggregate.MeanQ << " MeanY=" << aggregate.MeanY
<< " SVarQ=" << aggregate.SVarQ << " SVarY=" << aggregate.SVarY << endl;
}
};
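For readers new to the aggregate, here is a minimal, self-contained sketch of the two mechanisms WelfordAggregate combines: the single-sample Welford update used in Update(...) and the pairwise combination formula quoted (as Python pseudocode) in the UpdateParallel comment. It is independent of mpp's PPM layer; the struct name Welford and the helper Merge are illustrative, not part of this merge request.

#include <cmath>
#include <cstdio>

// Running mean / centered sum of squares for one statistic (illustrative helper).
struct Welford {
  long long n = 0;   // number of samples seen
  double mean = 0.0; // running mean
  double m2 = 0.0;   // sum of squared deviations from the running mean

  void Add(double x) {    // Welford single-sample update
    ++n;
    double d = x - mean;  // deviation from the old mean
    mean += d / n;
    double d2 = x - mean; // deviation from the new mean
    m2 += d * d2;
  }

  double SVar() const { return n > 1 ? m2 / (n - 1) : 0.0; }

  // Pairwise combination of two partial aggregates (Chan et al.),
  // the formula quoted in the UpdateParallel comment above.
  static Welford Merge(const Welford &a, const Welford &b) {
    Welford r;
    r.n = a.n + b.n;
    double delta = b.mean - a.mean;
    r.mean = a.mean + delta * b.n / r.n;
    r.m2 = a.m2 + b.m2 + delta * delta * a.n * b.n / r.n;
    return r;
  }
};

int main() {
  // Two "ranks" each see half of the samples; merging reproduces the global statistics.
  Welford rank0, rank1;
  for (double x : {1.0, 2.0, 3.0}) rank0.Add(x);
  for (double x : {4.0, 5.0, 6.0}) rank1.Add(x);
  Welford all = Welford::Merge(rank0, rank1);
  std::printf("M=%lld mean=%f svar=%f\n", all.n, all.mean, all.SVar());
  return 0;
}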
struct SampleCounter {
int M = 0;
int dM = 0;
int dMComm = 0;
int commSplit = 0;
bool parallel = true;
SampleCounter() {};
SampleCounter(WelfordAggregate aggregate) {
M = aggregate.M;
dM = aggregate.dM;
dMComm = aggregate.dMcomm;
parallel = aggregate.parallel;
commSplit = aggregate.commSplit;
}
SampleCounter(int dM, bool parallel = true) : parallel(parallel) {
UpdateSampleCounter(dM);