Commit f4003e6d authored by niklas.baumgarten's avatar niklas.baumgarten
Browse files

moved functions to cpp

parent de9f9465
#include "WelfordAggregate.hpp"
/// Returns the smallest power of two that is >= n.
/// Works by clearing low bits of (n - 1) until only the highest set bit
/// remains, then doubling it.
int WelfordAggregate::findNextPowerOf2(int n) {
  // Guard small inputs: the bit-trick below yields 0 for n <= 1,
  // but the next power of two of 0 or 1 is 1.
  if (n <= 1) return 1;
  n = n - 1;
  // n & (n - 1) drops the lowest set bit; loop until a single bit is left.
  while (n & (n - 1))
    n = n & (n - 1);
  return n << 1;
}
void WelfordAggregate::Update(const SampleSolution &fSol, const SampleSolution &cSol) {
double newW = fSol.W;
double newC = fSol.C;
......@@ -60,9 +53,10 @@ void WelfordAggregate::UpdateSampleCounter(int dM) {
if (parallel) {
for (int i = 0; i < ceil(log2(PPM->Size(0))) + 1; i++) {
if (dM >= PPM->Size(i)) {
int mod = dM % PPM->Size(i);
ctr.dM = dM;
int mod = dM % PPM->Size(i);
if (mod != 0) ctr.dM += (PPM->Size(i) - mod);
ctr.dMcomm = ceil((double) dM / PPM->Size(i));
commSplit = PPM->MaxCommSplit() - i;
break;
......@@ -70,11 +64,124 @@ void WelfordAggregate::UpdateSampleCounter(int dM) {
}
} else {
ctr.dMcomm = dM;
ctr.dM = dM;
commSplit = 0;
ctr.dM = dM;
}
}
/// Logs the aggregate's sample counter, mean and sample variance.
Logging &operator<<(Logging &s, const WelfordAggregate &aggregate) {
  s << aggregate.ctr;
  s << aggregate.mean;
  s << aggregate.sVar;
  return s;
}
/*
*
*/
/// Records one finished sample on this communicator:
/// one more done (Mcomm), one fewer pending (dMcomm).
void SampleCounter::Update() {
  ++Mcomm;
  --dMcomm;
}
/// Folds the per-communicator counters into the global ones by summing
/// across all ranks of the given communicator split.
void SampleCounter::UpdateParallel(int commSplit) {
// Accumulate this round's finished samples into the global total.
M += PPM->SumAcrossComm(Mcomm, commSplit);
// Every rank should have driven its dMcomm down to 0 (see Update());
// a nonzero sum indicates samples were scheduled but never computed.
dM = PPM->SumAcrossComm(dMcomm, commSplit);
if (dM != 0) Warning("dM after parallel update not 0.")  // Warning is a macro; no ';' by project convention
}
/// Logs global (M, dM) and per-communicator (Mcomm, dMcomm) sample counts.
Logging &operator<<(Logging &s, const SampleCounter &ctr) {
  s << "M=" << ctr.M << " dM=" << ctr.dM;
  s << " MComm=" << ctr.Mcomm << " dMComm=" << ctr.dMcomm << endl;
  return s;
}
/// Remembers the weight of the newest sample and accumulates the
/// per-communicator sums of weights and squared weights.
void Weights::Update(double _newW) {
  newW = _newW;
  Wcomm += _newW;
  W2comm += _newW * _newW;
}
/// Sums the per-communicator weight accumulators across all ranks of the
/// given communicator split into the global totals W and W2.
void Weights::UpdateParallel(int commSplit) {
W = PPM->SumAcrossComm(Wcomm, commSplit);
W2 = PPM->SumAcrossComm(W2comm, commSplit);
}
/// Logs global (W, W2) and per-communicator (Wcomm, W2comm) weight sums.
Logging &operator<<(Logging &s, const Weights &wgt) {
  s << "W=" << wgt.W << " W2=" << wgt.W2;
  s << " WComm=" << wgt.Wcomm << " W2Comm=" << wgt.W2comm << endl;
  return s;
}
/// Incremental (Welford-style) mean update on this communicator:
/// the cost delta is averaged uniformly over Mcomm samples, while the
/// Q and Y deltas are weighted by the newest sample's share of Wcomm.
void Mean::Update(double dC, double dQ, double dY, SampleCounter ctr, Weights wgt) {
  const double invCount = 1.0 / ctr.Mcomm;
  const double weightShare = wgt.newW / wgt.Wcomm;
  Ccomm += dC * invCount;
  Qcomm += dQ * weightShare;
  Ycomm += dY * weightShare;
}
/// Combines per-communicator means into global means across the given split.
/// In the Monte Carlo case (total weight equals the sample count) the
/// communicator means are recombined with sample-count weights; otherwise
/// (stochastic collocation) with the communicator weight sums.
/// NOTE(review): `abs` is applied to doubles here — confirm std::abs(double)
/// is the overload in scope (not C's integer abs, which would truncate).
void Mean::UpdateParallel(SampleCounter ctr, Weights wgt, int commSplit) {
C = abs(PPM->SumAcrossComm(ctr.Mcomm * Ccomm, commSplit) / ctr.M);
if (wgt.W == ctr.M) { // Monte Carlo Case
Q = abs(PPM->SumAcrossComm(ctr.Mcomm * Qcomm, commSplit) / ctr.M);
Y = abs(PPM->SumAcrossComm(ctr.Mcomm * Ycomm, commSplit) / ctr.M);
} else { // Stochastic Collocation Case
Q = abs(PPM->SumAcrossComm(wgt.Wcomm * Qcomm, commSplit));
Y = abs(PPM->SumAcrossComm(wgt.Wcomm * Ycomm, commSplit));
}
}
/// Logs the global means (Q, Y, C) and their per-communicator counterparts.
Logging &operator<<(Logging &s, const Mean &mean) {
  s << "MeanQ=" << mean.Q << " MeanY=" << mean.Y << " MeanC=" << mean.C << endl;
  s << " MeanQcomm=" << mean.Qcomm << " MeanYcomm=" << mean.Ycomm
    << " MeanCcomm=" << mean.Ccomm << endl;
  return s;
}
/// Turns per-communicator sums of squared deviations into unbiased sample
/// variances using Bessel's correction (divide by Mcomm - 1).
void SVar::Update(double C2comm, double Q2comm, double Y2comm, SampleCounter ctr) {
  const double dof = ctr.Mcomm - 1;
  Ccomm = C2comm / dof;
  Qcomm = Q2comm / dof;
  Ycomm = Y2comm / dof;
}
/// Turns the globally reduced sums of squared deviations into unbiased
/// sample variances over all M samples (Bessel's correction: M - 1).
void SVar::UpdateParallel(double C2, double Q2, double Y2, SampleCounter ctr) {
  const double dof = ctr.M - 1;
  C = C2 / dof;
  Q = Q2 / dof;
  Y = Y2 / dof;
}
/// Logs the global sample variances (Q, Y, C) and their per-communicator
/// counterparts.
Logging &operator<<(Logging &s, const SVar &sVar) {
  s << "SVarQ=" << sVar.Q << " SVarY=" << sVar.Y << " SVarC=" << sVar.C << endl;
  s << " SVarQcomm=" << sVar.Qcomm << " SVarYcomm=" << sVar.Ycomm
    << " SVarCcomm=" << sVar.Ccomm << endl;
  return s;
}
// Skewness accumulation is not implemented yet; this placeholder lets the
// aggregate's update loop call it unconditionally. TODO: implement.
void Skewness::Update() {
}
// Parallel reduction of the skewness accumulators is not implemented yet;
// placeholder kept so the parallel update sequence stays uniform. TODO.
void Skewness::UpdateParallel(int commSplit) {
}
/// Logs the skewness estimates of Q and Y.
Logging &operator<<(Logging &s, const Skewness &skewness) {
  s << "S[Q]=" << skewness.Q;
  s << " S[Y]=" << skewness.Y << endl;
  return s;
}
// Kurtosis accumulation is not implemented yet; the intended moment-based
// formula (fourth central moment over squared variance) is kept below for
// reference. TODO: implement once the required moments are tracked.
void Kurtosis::Update() {
//Averages avgs, Variances vars) {
// Q = (avgs.Q4 - 4.0 * avgs.Q3 * avgs.Q + 6.0 * avgs.Q2 * avgs.Q * avgs.Q -
// 3.0 * avgs.Q * avgs.Q * avgs.Q * avgs.Q) / vars.Q / vars.Q;
// Y = (avgs.Y4 - 4.0 * avgs.Y3 * avgs.Y + 6.0 * avgs.Y2 * avgs.Y * avgs.Y -
// 3.0 * avgs.Y * avgs.Y * avgs.Y * avgs.Y) / vars.Y / vars.Y;
// if (Y > 100.0) Warning("Kurtosis of Y above 100!")
}
// Parallel reduction of the kurtosis accumulators is not implemented yet;
// placeholder kept so the parallel update sequence stays uniform. TODO.
void Kurtosis::UpdateParallel(int commSplit) {
}
/// Logs the kurtosis estimates of Q and Y.
Logging &operator<<(Logging &s, const Kurtosis &kurtosis) {
  s << "K[Q]=" << kurtosis.Q;
  s << " K[Y]=" << kurtosis.Y << endl;
  return s;
}
......@@ -14,23 +14,11 @@ struct SampleCounter {
int dMcomm = 0;
void Update() {
Mcomm++;
dMcomm--;
}
void Update();
void UpdateParallel(int commSplit) {
M += PPM->SumAcrossComm(Mcomm, commSplit);
dM = PPM->SumAcrossComm(dMcomm, commSplit);
if(dM != 0) Warning("dM after parallel update not 0.")
}
void UpdateParallel(int commSplit);
friend Logging &operator<<(Logging &s, const SampleCounter &ctr) {
return s << "M=" << ctr.M
<< " dM=" << ctr.dM
<< " MComm=" << ctr.Mcomm
<< " dMComm=" << ctr.dMcomm << endl;
}
friend Logging &operator<<(Logging &s, const SampleCounter &ctr);
};
struct Weights {
......@@ -44,23 +32,11 @@ struct Weights {
double W2comm = 0.0;
void Update(double _newW) {
newW = _newW;
Wcomm += newW;
W2comm += newW * newW;
}
void Update(double _newW);
void UpdateParallel(int commSplit) {
W = PPM->SumAcrossComm(Wcomm, commSplit);
W2 = PPM->SumAcrossComm(W2comm, commSplit);
}
void UpdateParallel(int commSplit);
friend Logging &operator<<(Logging &s, const Weights &wgt) {
return s << "W=" << wgt.W
<< " W2=" << wgt.W2
<< " WComm=" << wgt.Wcomm
<< " W2Comm=" << wgt.W2comm << endl;
}
friend Logging &operator<<(Logging &s, const Weights &wgt);
};
struct Mean {
......@@ -76,31 +52,11 @@ struct Mean {
double Ccomm = 0.0;
void Update(double dC, double dQ, double dY, SampleCounter ctr, Weights wgt) {
Ccomm += dC * (1.0 / ctr.Mcomm);
Qcomm += dQ * (wgt.newW / wgt.Wcomm);
Ycomm += dY * (wgt.newW / wgt.Wcomm);
}
void Update(double dC, double dQ, double dY, SampleCounter ctr, Weights wgt);
void UpdateParallel(SampleCounter ctr, Weights wgt, int commSplit) {
C = abs(PPM->SumAcrossComm(ctr.Mcomm * Ccomm, commSplit) / ctr.M);
if (wgt.W == ctr.M) { // Monte Carlo Case
Q = abs(PPM->SumAcrossComm(ctr.Mcomm * Qcomm, commSplit) / ctr.M);
Y = abs(PPM->SumAcrossComm(ctr.Mcomm * Ycomm, commSplit) / ctr.M);
} else { // Stochastic Collocation Case
Q = abs(PPM->SumAcrossComm(wgt.Wcomm * Qcomm, commSplit));
Y = abs(PPM->SumAcrossComm(wgt.Wcomm * Ycomm, commSplit));
}
}
void UpdateParallel(SampleCounter ctr, Weights wgt, int commSplit);
friend Logging &operator<<(Logging &s, const Mean &mean) {
return s << "MeanQ=" << mean.Q
<< " MeanY=" << mean.Y
// << " MeanC=" << mean.C
<< " MeanQcomm=" << mean.Qcomm
<< " MeanYcomm=" << mean.Ycomm << endl;
// << " MeanCcomm=" << mean.Ccomm << endl;
}
friend Logging &operator<<(Logging &s, const Mean &mean);
};
struct SVar {
......@@ -116,26 +72,11 @@ struct SVar {
double Ycomm = 0.0;
void Update(double C2comm, double Q2comm, double Y2comm, SampleCounter ctr) {
Ccomm = C2comm / (ctr.Mcomm - 1);
Qcomm = Q2comm / (ctr.Mcomm - 1);
Ycomm = Y2comm / (ctr.Mcomm - 1);
}
void Update(double C2comm, double Q2comm, double Y2comm, SampleCounter ctr);
void UpdateParallel(double C2, double Q2, double Y2, SampleCounter ctr) {
C = C2 / (ctr.M - 1);
Q = Q2 / (ctr.M - 1);
Y = Y2 / (ctr.M - 1);
}
void UpdateParallel(double C2, double Q2, double Y2, SampleCounter ctr);
friend Logging &operator<<(Logging &s, const SVar &sVar) {
return s << "SVarQ=" << sVar.Q
<< " SVarY=" << sVar.Y
// << " SVarC=" << sVar.C
<< " SVarQcomm=" << sVar.Qcomm
<< " SVarYcomm=" << sVar.Ycomm << endl;
// << " SVarCcomm=" << sVar.Ccomm << endl;
}
friend Logging &operator<<(Logging &s, const SVar &sVar);
};
struct Skewness {
......@@ -151,15 +92,11 @@ struct Skewness {
double Ycomm = 0.0;
void Update() {
}
void Update();
void UpdateParallel(int commSplit) {
}
void UpdateParallel(int commSplit);
friend Logging &operator<<(Logging &s, const Skewness &skewness) {
return s << "S[Q]=" << skewness.Q << " S[Y]=" << skewness.Y << endl;
}
friend Logging &operator<<(Logging &s, const Skewness &skewness);
};
struct Kurtosis {
......@@ -175,21 +112,11 @@ struct Kurtosis {
double Ycomm = 0.0;
void Update() {
//Averages avgs, Variances vars) {
// Q = (avgs.Q4 - 4.0 * avgs.Q3 * avgs.Q + 6.0 * avgs.Q2 * avgs.Q * avgs.Q -
// 3.0 * avgs.Q * avgs.Q * avgs.Q * avgs.Q) / vars.Q / vars.Q;
// Y = (avgs.Y4 - 4.0 * avgs.Y3 * avgs.Y + 6.0 * avgs.Y2 * avgs.Y * avgs.Y -
// 3.0 * avgs.Y * avgs.Y * avgs.Y * avgs.Y) / vars.Y / vars.Y;
// if (Y > 100.0) Warning("Kurtosis of Y above 100!")
}
void Update();
void UpdateParallel(int commSplit) {
}
void UpdateParallel(int commSplit);
friend Logging &operator<<(Logging &s, const Kurtosis &kurtosis) {
return s << "K[Q]=" << kurtosis.Q << " K[Y]=" << kurtosis.Y << endl;
}
friend Logging &operator<<(Logging &s, const Kurtosis &kurtosis);
};
class WelfordAggregate {
......@@ -212,8 +139,6 @@ private:
double Y4comm = 0.0;
static int findNextPowerOf2(int n);
public:
SampleCounter ctr;
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment