Commit 3c2b5e5c authored by niklas.baumgarten's avatar niklas.baumgarten
Browse files

moved sample counter into aggregate

parent 3f7560ae
Pipeline #148696 passed with stages
in 48 minutes and 28 seconds
......@@ -8,7 +8,7 @@ void MonteCarlo::Method() {
aggregate.UpdateParallel();
vout(2) << sums;
avgs.Update(sums, aggregate.M);
avgs.Update(sums, aggregate.ctr.M);
vars.Update(avgs);
kurtosis.Update(avgs, vars);
......@@ -20,15 +20,15 @@ void MonteCarlo::Method() {
}
void MonteCarlo::method() {
SampleSolution coarseSolution(pdeSolver->GetDisc(), coarseId);
SampleSolution fineSolution(pdeSolver->GetDisc(), fineId);
while (aggregate.dMcomm != 0) {
computeSampleSolution(aggregate.index(), fineId, fineSolution);
if (onlyFine) coarseSolution.Init();
else computeSampleSolution(aggregate.index(), coarseId, coarseSolution);
aggregate.Update(fineSolution, coarseSolution);
sums.Update(fineSolution, coarseSolution); // todo rmv
SampleSolution cSolution(pdeSolver->GetDisc(), cId);
SampleSolution fSolution(pdeSolver->GetDisc(), fId);
while (aggregate.ctr.dMcomm != 0) {
computeSampleSolution(aggregate.index(), fId, fSolution);
if (onlyFine) cSolution.Init();
else computeSampleSolution(aggregate.index(), cId, cSolution);
aggregate.Update(fSolution, cSolution);
sums.Update(fSolution, cSolution); // todo rmv
vout(3) << sums; // todo rmv
}
}
......
......@@ -2,6 +2,7 @@
#define MLMC_MC_HPP
#include "montecarlo/datastructure/EmpiricMeasures.hpp"
#include "montecarlo/datastructure/WelfordAggregate.hpp"
#include "pdesolver/PDESolverCreator.hpp"
#include "mesh/MeshesCreator.hpp"
......@@ -26,8 +27,6 @@ protected:
public:
WelfordAggregate aggregate;
SampleCounter ctr; // todo remove, is still here for test
Sums sums;
Averages avgs;
......@@ -40,9 +39,9 @@ public:
MeshesCreator meshesCreator;
SampleID coarseId;
SampleID cId;
SampleID fineId;
SampleID fId;
MonteCarlo(int level, int dM, bool onlyFine, bool parallel) :
level(level), onlyFine(onlyFine), parallel(parallel),
......@@ -64,14 +63,12 @@ public:
config.get("MCVerbose", verbose);
config.get("MCParallel", parallel);
fineId.fLevel = level;
fineId.cLevel = level - 1;
fineId.coarse = false;
coarseId.fLevel = level;
coarseId.cLevel = level - 1;
coarseId.coarse = true;
ctr.parallel = parallel; // todo rmv
ctr.UpdateSampleCounter(dM); // todo rmv
fId.fLevel = level;
fId.cLevel = level - 1;
fId.coarse = false;
cId.fLevel = level;
cId.cLevel = level - 1;
cId.coarse = true;
aggregate.parallel = parallel;
aggregate.UpdateSampleCounter(dM);
......
......@@ -36,7 +36,7 @@ void MultilevelMonteCarlo::adaptiveMethod() {
bool converged = false;
while (!converged) {
for (auto &[level, mc] : mcMap)
if (mc.aggregate.dM > 0)
if (mc.aggregate.ctr.dM > 0)
mc.Method();
mcMap.UpdateSampleCounter(epsilon);
......
......@@ -13,8 +13,7 @@ void MonteCarloMap::UpdateSampleCounter(double epsilon) {
optimalM = (int) (ceil(2 * pow(epsilon, -2) * factor *
sqrt(mc.vars.Y / mc.avgs.Cost)));
if (optimalM == 1) optimalM++; // Hack
mc.aggregate.UpdateSampleCounter(optimalM - mc.aggregate.M);
mc.ctr.UpdateSampleCounter(optimalM - mc.ctr.M);
mc.aggregate.UpdateSampleCounter(optimalM - mc.aggregate.ctr.M);
}
}
......@@ -35,7 +34,7 @@ void MonteCarloMap::AppendLevel(double epsilon, Exponents exponents, int newLeve
void SampleCounterMap::Update(const MonteCarloMap &mcMap) {
for (auto &[level, mc]: mcMap)
_levelMap[level] = SampleCounter(mc.aggregate);
_levelMap[level] = SampleCounter(mc.aggregate.ctr);
}
bool SampleCounterMap::NoSamplesLeft() {
......
......@@ -2,6 +2,7 @@
#define EMPIRICMEASURELEVELMAPS_HPP
#include "EmpiricMeasures.hpp"
#include "WelfordAggregate.hpp"
#include "basics/Level.hpp"
#include "montecarlo/MonteCarlo.hpp"
#include <math.h>
......
......@@ -42,18 +42,4 @@ void Kurtosis::Update(Averages avgs, Variances vars) {
if (Y > 100.0) Warning("Kurtosis of Y above 100!")
}
// Registers dM newly requested samples and decides how to distribute them
// across parallel communicators: dMComm is the per-communicator share and
// commSplit the split level the work will run on.
void SampleCounter::UpdateSampleCounter(int dM) {
this->dM = dM;
if (parallel) {
// Search from the full communicator downwards for the largest split on
// which every communicator gets at least one sample.
// NOTE(review): assumes PPM->Size(i) shrinks as i grows — confirm PPM semantics.
for (int i = 0; i < ceil(log2(PPM->Size(0))) + 1; i++) {
if (dM >= PPM->Size(i)) {
// Round up so dMComm * Size(i) >= dM (may compute a few extra samples).
dMComm = ceil((double) dM / PPM->Size(i));
commSplit = PPM->MaxCommSplit() - i;
break;
}
}
// NOTE(review): if dM < PPM->Size(i) for every i (e.g. dM == 0), dMComm and
// commSplit keep their previous values — confirm this is intended.
} else {
// Serial mode: all samples run on the unsplit communicator.
dMComm = dM;
commSplit = 0;
}
}
......@@ -86,144 +86,4 @@ struct Kurtosis {
}
};
// Online (Welford-style) accumulator for Monte Carlo samples of a quantity of
// interest Q and the level difference Y = Q_fine - Q_coarse. Each process
// accumulates local ("comm") moments sample by sample; UpdateParallel() then
// reduces them to global values across communicators via PPM.
struct WelfordAggregate { // todo make class
// --- per-communicator (local) state, updated once per sample ---
int Mcomm = 0;     // samples aggregated locally so far
int dMcomm = 0;    // samples still to be computed locally
double MeanQcomm = 0.0;  // running local mean of Q
double Q2comm = 0.0;     // local sum of squared deviations of Q (Welford M2)
double SVarQcomm = 0.0;  // local sample variance of Q = Q2comm / (Mcomm - 1)
double MeanYcomm = 0.0;  // running local mean of Y
double Y2comm = 0.0;     // local sum of squared deviations of Y
double SVarYcomm = 0.0;  // local sample variance of Y
// --- globally reduced state, filled by UpdateParallel() ---
double MeanQ = 0.0;
double Q2 = 0.0;
double SVarQ = 0.0;  // NOTE(review): never assigned in this struct — see UpdateParallel
double MeanY = 0.0;
double Y2 = 0.0;
double SVarY = 0.0;  // NOTE(review): never assigned in this struct — see UpdateParallel
int dM = 0;          // globally remaining samples
int M = 0;           // globally aggregated samples
int commSplit = 0;   // communicator split level the samples run on
double Cost = 0.0;   // running mean cost per sample (local update, see Update)
bool parallel = true;
public:
// Folds one fine/coarse sample pair into the local moments.
void Update(const SampleSolution &fineSolution, const SampleSolution &coarseSolution) {
double newC = fineSolution.Cost;
double newQ = fineSolution.Q;
double newY = fineSolution.Q - coarseSolution.Q;
Update(newC, newQ, newY);
}
// Welford single-pass update of local mean/variance for cost, Q and Y.
// Statement order is essential: dQ uses the old mean, dQ2 the new one.
void Update(double newC, double newQ, double newY) {
Mcomm++;
dMcomm--;
double dCost = newC - Cost;
Cost += dCost / Mcomm;
double dQ = newQ - MeanQcomm;
MeanQcomm += dQ / Mcomm;
double dQ2 = newQ - MeanQcomm;
Q2comm += dQ * dQ2;
// NOTE(review): for the very first sample Mcomm - 1 == 0, so this is
// 0.0 / 0 -> NaN until the second sample arrives — confirm acceptable.
SVarQcomm = Q2comm / (Mcomm - 1);
double dY = newY - MeanYcomm;
MeanYcomm += dY / Mcomm;
double dY2 = newY - MeanYcomm;
Y2comm += dY * dY2;
SVarYcomm = Y2comm / (Mcomm - 1);
}
// Reduces the local moments to global values across all communicators.
void UpdateParallel() {
M = PPM->SumOnCommSplit(Mcomm, 0) / PPM->Size(commSplit);
dM = PPM->SumOnCommSplit(dMcomm, 0) / PPM->Size(commSplit);
// Weighted means: each communicator contributes Mcomm * local mean.
MeanQ = (PPM->SumOnCommSplit(Mcomm * MeanQcomm, 0) / PPM->Size(commSplit)) / M;
MeanY = (PPM->SumOnCommSplit(Mcomm * MeanYcomm, 0) / PPM->Size(commSplit)) / M;
// NOTE(review): these reduce the GLOBAL Q2/Y2 (initially 0) instead of the
// local Q2comm/Y2comm — looks like a bug; the global variance combination
// below is also still unimplemented (SVarQ/SVarY stay 0). Confirm.
Q2 = PPM->SumOnCommSplit(Q2, 0) / PPM->Size(commSplit);
Y2 = PPM->SumOnCommSplit(Y2, 0) / PPM->Size(commSplit);
// def parallel_variance(n_a, avg_a, M2_a, n_b, avg_b, M2_b):
// n = n_a + n_b
// delta = avg_b - avg_a
// M2 = M2_a + M2_b + delta ** 2 * n_a * n_b / n
// var_ab = M2 / (n - 1)
// return var_ab
}
// Globally unique index of the next sample this process should compute.
int index() {
return M + Mcomm + dMcomm * PPM->Proc(0);
// return M + dMcomm * PPM->Proc(0);
}
// Registers dM new samples and picks the communicator split; same logic as
// the (removed) SampleCounter::UpdateSampleCounter.
void UpdateSampleCounter(int dM) {
this->dM = dM;
if (parallel) {
// NOTE(review): if dM < PPM->Size(i) for every i, dMcomm/commSplit keep
// their previous values — confirm this fallthrough is intended.
for (int i = 0; i < ceil(log2(PPM->Size(0))) + 1; i++) {
if (dM >= PPM->Size(i)) {
dMcomm = ceil((double) dM / PPM->Size(i));
commSplit = PPM->MaxCommSplit() - i;
break;
}
}
} else {
dMcomm = dM;
commSplit = 0;
}
}
friend Logging &operator<<(Logging &s, const WelfordAggregate &aggregate) {
return s << "M=" << aggregate.M << " dM=" << aggregate.dM
<< " Mcomm=" << aggregate.Mcomm << " dMcomm=" << aggregate.dMcomm << endl
<< "MeanQ=" << aggregate.MeanQ << " MeanY=" << aggregate.MeanY
<< " SVarQ=" << aggregate.SVarQ << " SVarY=" << aggregate.SVarY << endl;
}
};
struct SampleCounter {
int M = 0;
int dM = 0;
int dMComm = 0;
int commSplit = 0;
int parallel = true;
SampleCounter() {};
SampleCounter(WelfordAggregate aggregate) {
M = aggregate.M;
dM = aggregate.dM;
dMComm = aggregate.dMcomm;
parallel = aggregate.parallel;
commSplit = aggregate.commSplit;
}
SampleCounter(int dM, bool parallel = true) : parallel(parallel) {
UpdateSampleCounter(dM);
}
void UpdateSampleCounter(int dM);
SampleCounter(int M, int dM) :
M(M), dM(dM) {}
void Update() {
M += dM;
dMComm = 0;
dM = 0;
}
friend Logging &operator<<(Logging &s, const SampleCounter &ctr) {
return s << "M=" << ctr.M << " dM=" << ctr.dM << " dMComm=" << ctr.dMComm << endl;
}
};
#endif //EMPIRICMEASURES_HPP
......@@ -14,10 +14,10 @@ struct TestData {
};
SampleCounterMap _ctrs = {
{3, SampleCounter(1600, 0)},
{4, SampleCounter(400, 0)},
{5, SampleCounter(100, 0)},
{6, SampleCounter(25, 0)}
{3, SampleCounter{1600, 0, 0, 0}},
{4, SampleCounter{400, 0, 0, 0}},
{5, SampleCounter{100, 0, 0, 0}},
{6, SampleCounter{25, 0, 0, 0}}
};
AveragesMap _avgs = {
......@@ -53,7 +53,7 @@ struct TestData {
MonteCarloMap GenerateMonteCarloMapWithData() {
for (auto &[level, mc] : _mcMap) {
mc.ctr = _ctrs[level];
mc.aggregate.ctr = _ctrs[level];
mc.avgs = _avgs[level];
mc.vars = _vars[level];
mc.kurtosis = _kurtosis[level];
......
......@@ -19,8 +19,8 @@ TEST_F(TestEmpiricMeasureLevelMaps, TestMonteCarloMapUpdateSampleCounter) {
mcMap.UpdateSampleCounter(0.01);
for (auto &[level, ctr] : data.ctrs) {
EXPECT_TRUE(data.ctrs[level].dM == 0);
EXPECT_TRUE(mcMap.find(level)->second.ctr.dM != 0);
EXPECT_TRUE(data.ctrs[level].M == mcMap.find(level)->second.ctr.M);
EXPECT_TRUE(mcMap.find(level)->second.aggregate.ctr.dM != 0);
EXPECT_TRUE(data.ctrs[level].M == mcMap.find(level)->second.aggregate.ctr.M);
}
}
......
......@@ -3,7 +3,7 @@
#include "TestEnvironment.hpp"
#include "montecarlo/datastructure/EmpiricMeasures.hpp"
#include "montecarlo/datastructure/WelfordAggregate.hpp"
#include "MeshesCreator.hpp"
#include "generators/NormalDistribution.hpp"
......@@ -31,64 +31,64 @@ protected:
TEST_F(TestWelfordAggregate, TestWith4Samples) {
aggregate.UpdateSampleCounter(4);
if (PPM->Size(0) == 8) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.dMcomm, 1);
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.ctr.dMcomm, 1);
EXPECT_EQ(aggregate.commSplit, 2);
}
if (PPM->Size(0) == 4) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.dMcomm, 1);
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.ctr.dMcomm, 1);
EXPECT_EQ(aggregate.commSplit, 2);
}
if (PPM->Size(0) == 2) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.dMcomm, 2);
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.ctr.dMcomm, 2);
EXPECT_EQ(aggregate.commSplit, 1);
}
if (PPM->Size(0) == 1) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit
<< endl;
EXPECT_EQ(aggregate.dMcomm, 4);
EXPECT_EQ(aggregate.ctr.dMcomm, 4);
EXPECT_EQ(aggregate.commSplit, 0);
}
pout << "Mcomm before update " << aggregate.Mcomm << endl;
while (aggregate.dMcomm != 0)
pout << "Mcomm before update " << aggregate.ctr.Mcomm << endl;
while (aggregate.ctr.dMcomm != 0)
aggregate.Update(1.0, 1.0, 1.0);
pout << "Mcomm after update " << aggregate.Mcomm << endl;
pout << "Mcomm after update " << aggregate.ctr.Mcomm << endl;
aggregate.UpdateParallel();
pout << "M after update " << aggregate.M << endl;
EXPECT_EQ(aggregate.M, 4);
pout << "M after update " << aggregate.ctr.M << endl;
EXPECT_EQ(aggregate.ctr.M, 4);
}
TEST_F(TestWelfordAggregate, TestWith8Samples) {
aggregate.UpdateSampleCounter(8);
if (PPM->Size(0) == 8) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.dMcomm, 1);
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.ctr.dMcomm, 1);
EXPECT_EQ(aggregate.commSplit, 3);
}
if (PPM->Size(0) == 4) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.dMcomm, 2);
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.ctr.dMcomm, 2);
EXPECT_EQ(aggregate.commSplit, 2);
}
if (PPM->Size(0) == 2) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.dMcomm, 4);
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.ctr.dMcomm, 4);
EXPECT_EQ(aggregate.commSplit, 1);
}
if (PPM->Size(0) == 1) {
pout << "dMComm " << aggregate.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.dMcomm, 8);
pout << "dMComm " << aggregate.ctr.dMcomm << " commSplit " << aggregate.commSplit << endl;
EXPECT_EQ(aggregate.ctr.dMcomm, 8);
EXPECT_EQ(aggregate.commSplit, 0);
}
pout << "Mcomm before update " << aggregate.Mcomm << endl;
while (aggregate.dMcomm != 0)
pout << "Mcomm before update " << aggregate.ctr.Mcomm << endl;
while (aggregate.ctr.dMcomm != 0)
aggregate.Update(1.0, 1.0, 1.0);
pout << "Mcomm after update " << aggregate.Mcomm << endl;
pout << "Mcomm after update " << aggregate.ctr.Mcomm << endl;
aggregate.UpdateParallel();
pout << "M after update " << aggregate.M << endl;
EXPECT_EQ(aggregate.M, 8);
pout << "M after update " << aggregate.ctr.M << endl;
EXPECT_EQ(aggregate.ctr.M, 8);
}
TEST_F(TestWelfordAggregate, TestWith1e6Samples) {
......@@ -101,10 +101,10 @@ TEST_F(TestWelfordAggregate, TestWith1e6Samples) {
else if (PPM->Size(0) == 2) EXPECT_EQ(aggregate.commSplit, 1);
else if (PPM->Size(0) == 1) EXPECT_EQ(aggregate.commSplit, 0);
EXPECT_EQ(aggregate.dMcomm, numSamples / PPM->Size(0));
EXPECT_EQ(aggregate.ctr.dMcomm, numSamples / PPM->Size(0));
pout << "MeanQcomm before update " << aggregate.MeanQcomm << endl;
while (aggregate.dMcomm != 0) {
while (aggregate.ctr.dMcomm != 0) {
normalDist.DrawSample(SampleID(0, aggregate.index(), false));
double val = normalDist.EvalSample();
aggregate.Update(val, val, val);
......@@ -117,7 +117,7 @@ TEST_F(TestWelfordAggregate, TestWith1e6Samples) {
aggregate.UpdateParallel();
pout << "MeanQ after update " << aggregate.MeanQ << endl;
EXPECT_EQ(aggregate.M, numSamples);
EXPECT_EQ(aggregate.ctr.M, numSamples);
}
#endif //TESTWELFORDAGGREGATE_HPP
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment