// module name, passed to the exception helper IfTrueThrowTypeA() below so that
// thrown errors can report which source file raised them
static const char* szModule = "lossfunc.cpp";
//------------------------------------------------------------------------------
// module lossfunc.cpp //
// //
// Tracks some loss function results like mean absolute error (MAE) or //
// relative number of misclassifications (MCE). //
// See below or http://www.newty.de/pnc2/sdocu.html for more information. //
// //
// copyright (c) 2000-2003 by Lars Haendel //
// home: www.newty.de //
// //
// This program is free software and can be used under the terms of the //
// GNU licence. See header file for further information and disclaimer. //
// //
//------------------------------------------------------------------------------
// //
// CREATE: You have to pass the # predictions you will make if you //
// want the predicted and original values etc. to be stored. You //
// have to pass the mean learn data output value to enable the class //
// to calculate the 'base-line error', i.e. the error one would have //
// made if always the mean learn data output value would have been //
// predicted. //
// //
// USE: Call function EvPrediction() and pass predicted and original //
// value. Note: If the original value is unknown, just pass an //
// arbitrary value but keep in mind that the loss results will be //
// meaningless then. If predictions were stored, you may call Y_p() //
// to get the predicted values. //
// //
// File I/O: function to write loss results and values if they were //
// stored //
//------------------------------------------------------------------------------
//----------------------------------------------------------------------------------------------------------------------
#include <cstdio>       // due to sprintf() (fix: <stdio> is not a standard header)
#include <cmath>        // fabs()          (fix: <math> is not a standard header)
#include <iomanip>      // setw()
#include <iostream>
#include "util.h"       //
#include "lossfunc.h"   // WritePercentage()
#include "exception.h"  // IfTrueThrowTypeA()
//----------------------------------------------------------------------------------------------------------------------
// calculate actual loss-function values
float TLossFunction::Mae() { if(c!=0) return mae/c; else return -1; };
float TLossFunction::Mse() { if(c!=0) return mse/c; else return -1; };
float TLossFunction::Mce() { if(c!=0) return mce/c; else return -1; };
float TLossFunction::nErr() { return mce; }; // # missclassifications
float TLossFunction::InsideRatio() { if(c!=0) return nIns/(float)c; else return -1; }; // ratio of inside predictions
float TLossFunction::MaeIns() { if(nIns!=0) return mae_ins/nIns; else return -1; };
float TLossFunction::MseIns() { if(nIns!=0) return mse_ins/nIns; else return -1; };
float TLossFunction::MceIns() { if(nIns!=0) return mce_ins/nIns; else return -1; };
float TLossFunction::MaeOut() { if(nOut!=0) return mae_out/nOut; else return -1; };
float TLossFunction::MseOut() { if(nOut!=0) return mse_out/nOut; else return -1; };
float TLossFunction::MceOut() { if(nOut!=0) return mce_out/nOut; else return -1; };
float TLossFunction::NormMae(){ if(mae_n!=0) return mae/mae_n; else return -1; };
float TLossFunction::NormMse(){ if(mse_n!=0) return mse/mse_n; else return -1; };
float TLossFunction::MaeBase(){ if(c!=0) return (mae_n/c); else return -1; };
float TLossFunction::MseBase(){ if(c!=0) return (mse_n/c); else return -1; };
//----------------------------------------------------------------------------------------------------------------------
// constructor
TLossFunction::TLossFunction(const float& _y_mean, const int& __nTup/*=-1*/)
{
   // remember the mean learn data output value; it defines the 'base-line'
   // prediction whose error the mae_n/mse_n sums accumulate
   y_mean = _y_mean;

   // reset the prediction counters and every loss-function sum
   c   = nIns = nOut = 0;
   mae = mse  = mce  = 0;
   mae_ins = mse_ins = mce_ins = 0;
   mae_out = mse_out = mce_out = 0;
   mae_n   = mse_n   = 0;

   // storage for the individual predictions is only allocated when a positive
   // number of expected predictions was passed; otherwise the pointers stay NULL
   // (delete[] in the destructor is safe either way)
   _y = _y_p = _y_e = __nIns = NULL;
   _nTup = __nTup;
   if(_nTup>0)
   {
      _y     = new float[_nTup];   // real output values
      _y_p   = new float[_nTup];   // predicted output values
      _y_e   = new float[_nTup];   // absolute prediction errors
      __nIns = new float[_nTup];   // # 'insides' per prediction
   }
}
//----------------------------------------------------------------------------------------------------------------------
// destructor
TLossFunction::~TLossFunction()
{
   // release the prediction storage; the pointers are NULL when nothing was
   // allocated (see constructor) and delete[] on NULL is a no-op
   delete[] __nIns;
   delete[] _y_e;
   delete[] _y_p;
   delete[] _y;
}
//----------------------------------------------------------------------------------------------------------------------
// tell one predicted and real value, error is calculated and loss is added to the loss sums
void TLossFunction::EvPrediction(const float& y_p, const float& y, const float _nIns /*=0*/)
{
c++; // increment # of predictions
// b) calculate prediction error
float err = fabs(y - y_p);
float err_n = fabs(y - y_mean); // baseline prediction (always predict mean)
// c) track loss function sums
mse += err * err; // square error sum
mse_n += err_n * err_n; // " (normalized)
mae += err; // absolute error sum
mae_n += err_n; // " (normalized)
float e = (y - ((int) (y_p+0.5))); // # missclassifications
if(e!=0)
{
mce++;
if(_nIns>0) // inside prediction
mce_ins++;
else
mce_out++;
}
if(_nIns>0) // absolute/squared error sums for 'inside' predictions
{
mse_ins += err*err;
mae_ins += err;
nIns++;
}
else // absolute/squared error sums for 'outside' predictions
{
mse_out += err*err;
mae_out += err;
nOut++;
}
if(_nTup<0) // return if prediction should not be stored
return;
// pre-check
IfTrueThrowTypeA(c>_nTup,"Function called to many times! Cannot store predictions anymore!","TLossFunction::Predict"
, szModule);
// store predictions
_y[c-1]=y; // real value
_y_p[c-1]=y_p; // predicted value
_y_e[c-1]=fabs(y-y_p); // prediction error
__nIns[c-1]=_nIns;
}
//----------------------------------------------------------------------------------------------------------------------
// write loss function results to file
// write loss function results to file, and - if they were stored - the
// individual predictions as well
// file           - open output stream
// f_Symbolic     - true: symbolic output, report Mce; false: continuous output, report Mae/Mse
// f_IsWithOutput - true: real output values were known, i.e. loss results are meaningful
void TLossFunction::WriteResults(ofstream& file, const bool& f_Symbolic, const bool& f_IsWithOutput/*=true*/)
{
   // pre-check (message fixed: was the garbled "called to before")
   IfTrueThrowTypeA(c==0,"Function called before any predictions were made!","TLossFunction::WriteResults",szModule);

   // a) write loss function results if real output value was known
   if(f_IsWithOutput)
   {
      file << endl << ComChar << " Loss function results ('-1' means unknown)" << endl;
      file << setiosflags(ios::left) << resetiosflags(ios::right); // set left justified output
      if(f_Symbolic) // loss results for symbolic output (Mce)
      {
         file << ComChar << " Mce = " << WritePercentage(Mce(), 8, PREC_LOSS_PERCENTAGE, true) << endl;
         file << ComChar << " Mce_ins = " << WritePercentage(MceIns(), 8, PREC_LOSS_PERCENTAGE, true) << endl;
         file << ComChar << " Mce_out = " << WritePercentage(MceOut(), 8, PREC_LOSS_PERCENTAGE, true) << endl;
      }
      else // loss results for continuous output (Mae and Mse)
      {
         file << ComChar << " Mae = " << setw(12) << Mae() << " Mse = " << setw(12) << Mse() << endl;
         file << ComChar << " Mae_ins = " << setw(12) << MaeIns() << " Mse_ins = " << setw(12) << MseIns() << endl;
         file << ComChar << " Mae_out = " << setw(12) << MaeOut() << " Mse_out = " << setw(12) << MseOut() << endl;
         file << ComChar << " Mae_n = " << setw(12) << NormMae() << " Mse_n = " << setw(12) << NormMse() << endl;
         file << ComChar << " Mae_base = " << setw(12) << MaeBase() << " Mse_base = " << setw(12) << MseBase() << endl;
      }
   }
   file << endl << ComChar << " " << WritePercentage(InsideRatio(), 0, 1, true) << " inside predictions" << endl;

   // b) if predictions were stored -> write them to file
   // braces added: the trailing 'else' only bound to the outer 'if' because the
   // inner if/else chain happened to be complete - make the nesting explicit
   if(_nTup>0)
   {
      if(f_IsWithOutput) // output is given: write it additionally together with error
      {
         file << endl << ComChar << " Format: predicted output | real output | error | insides" << endl;
         for(int i=0;i<c;i++)
            file << _y_p[i] << " " << _y[i] << " " << _y_e[i] << " " << __nIns[i] << endl;
      }
      else // no output is given: write only predicted values and # insides
      {
         file << endl << ComChar << " Format: predicted output | insides" << endl;
         for(int i=0;i<c;i++)
            file << _y_p[i] << " " << __nIns[i] << endl;
      }
   }
   else
   {
#ifdef GUI
      file << endl << ComChar << " No predictions were stored!" << endl;
#else
      file << endl << ComChar << " Memory save mode: No predictions were stored! Unset flag 'SaveMemory'" << endl;
      file << ComChar << " in section '[Basic]' if you want to log the predictions." << endl;
#endif
   }
}