static const char* szModule = "SetResult.cpp";

//------------------------------------------------------------------------------
//    module SetResult.cpp                                                    //
//                                                                            //
//    A) TParaSetResult encapsulates loss function results and model          //
//       characteristics for a model learned and tested several times on      //
//       different data.                                                      //
//                                                                            //
//    B) Class TParaSetResults encapsulates several TParaSetResult objects,   //
//       i.e. it stores the results of several parameter sets used to learn   //
//       and test a model on several different data sets.                     //
//                                                                            //
//    copyright (c) 2001-2003 by Lars Haendel                                 //
//    mail: lore17@newty.de                                                   //
//    home: www.newty.de                                                      //
//                                                                            //
//                                                                            //
//    This program is free software and can be used under the terms of the    //
//    GNU licence. See header file for further information and disclaimer.    //
//                                                                            //
//------------------------------------------------------------------------------
//                                                                            //
// A) USE: Call functions SetLossObjectX() and others to pass the loss        //
//    functions or to set the model characteristic values. Call               //
//    function Result(Id) to get the stored results indexed by Id. Use        //
//    StringToCriterionId() and CriterionIdToString() to convert from         //
//    Id to string and vice versa. Call Calculate() to calculate mean         //
//    and deviation of results (except for 'pVal' and 'Rank') over all        //
//    repetitions. To calculate mean and deviation for 'pVal' and 'Rank'      //
//    call Calculate2().                                                      //
//                                                                            //
//    NOTE: Holds values for each test run as well as mean and                //
//    deviation calculated over all test runs.                                //
//                                                                            //
//    File I/O: Write functions                                               //
//------------------------------------------------------------------------------
//                                                                            //
// B) CREATE: specify the # of different parameter sets and the # of          //
//    different repetitions done for each                                     //
//                                                                            //
//    USE: Call function Get() to get a reference to a specific result        //
//    object. Then you can initialize it. When all result objects are         //
//    ready you can call CalculateRankAndPVal() to calculate the              //
//    statistics which compare each parameter set to the others using         //
//    statistical tests ('pVal') or a kind of averaged rank ('Rank').         //
//                                                                            //
//    File I/O: Write functions                                               //
//------------------------------------------------------------------------------
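
//----------------------------------------------------------------------------------------------------------------------
// Usage sketch (illustrative only; the exact member signatures are declared in SetResult.h and may differ in detail):
//
//    TParaSetResults results(nSets, nTest, f_Regression, f_CritToCalc);   // (B) one result object per parameter set
//    TParaSetResult& r = results.Get(s);                                  // (B) result object of the s-th set
//    // ... pass the loss objects / model characteristics of each repetition t via the SetLossObjectX() and
//    //     related setters described in (A), then:
//    r.Calculate();                                                       // (A) mean/deviation of ordinary criteria
//    results.CalculateRankAndPVal(ranking);                               // (B) 'Rank'/'pVal' incl. Calculate2()
//    float mse = r.Result(StringToCriterionId("Mse_T"));                  // (A) query a mean result over all tests
//----------------------------------------------------------------------------------------------------------------------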



#include <cmath>                 // due to:  sqrt()

#include "SetResult.h"
#ifndef RELEASE
#include "statistics.h"          //          statistical tests
#endif
#include "exception.h"           //          IfTrueThrowTypeA()
#include <iomanip>               //          setw()

#define WIDTH (int) 10



//----------------------------------------------------------------------------------------------------------------------
// criterion names and access function
// Note: MUST be kept consistent!
// if you add a criterion in 'szCriterion' you've also got to
//   a) increase 'nCriterion' in header file
//   b) add criterion percentage flag and precision setting
//   c) write the code in TParaSetResult::Result() which returns the criterion value given its Id
//   d) if it's a criterion which needs calculation of the loss on learn data, add it in LossOnLearnData()
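//
// example (hypothetical new criterion "Rmse_T"): append "Rmse_T" to 'szCriterion', append matching entries to
// 'f_Percentage' and 'precision', increase 'nCriterion' in the header file and add a 'case 21 : ...' branch in
// TParaSetResult::Result(); since it would be evaluated on test data only, LossOnLearnData() stays untouched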


// criterion names
static const char szCriterion[nCriterion][32]
   = { /*0*/  "Mae_T"    , "Mae_L"    , "Mse_T"     , "Mse_L"  , "N_Err_T",
       /*5*/  "N_Err_L"  , "Mce_T"    , "Mce_L"     , "K"      , "K'",
       /*10*/ "VarPerCub", "SizeFac"  , "Inside_T"  , "Inside_L", "Mce_T_Ins",
       /*15*/ "Mce_T_Out", "Mae_T_Ins", "Mae_T_Out" , "pVal"   , "Rank",
       /*20*/ "Hitrate" };


// flag indicates that criterion should be displayed as percentage
static const bool f_Percentage[nCriterion]
   = { /*0*/  false, false, false, false, false,
       /*5*/  false, true , true , false, false,
       /*10*/ false, false, true , true , true,
       /*15*/ true , false, false, false, false,
       /*20*/ true  };


// precision
static const int precision[nCriterion]
   = { /*0*/  3, 3, 3, 3, 0,
       /*5*/  0, 2, 2, 0, 0,
       /*10*/ 1, 1, 1, 1, 2,
       /*15*/ 2, 3, 3, 2, 0,
       /*20*/ 1  };



//----------------------------------------------------------------------------------------------------------------------
// convert criterion Id to criterion name
const char* CriterionIdToString(const int& id)
{
// IfTrueThrowTypeA(id<0 || id>=nCriterion, "Value of 'id' is out of range!", "CriterionIdToString", szModule);
   if(id<0 || id>=nCriterion)
      return "unknown";
   else
      return szCriterion[id];
}


//----------------------------------------------------------------------------------------------------------------------
// convert criterion name to criterion Id
int StringToCriterionId(const char* szString)
{
   for(int i=0;i<nCriterion;i++)
      if(strcmp(szString, szCriterion[i])==0)
         return i;                                    // return criterion Id if match is found

   return -1;                                         // error, criterion name not found
}


//----------------------------------------------------------------------------------------------------------------------
// check if the loss on learn data needs to be calculated for the enabled criteria
bool LossOnLearnData(const bool*const& criterion)
{
   // check if a criterion is enabled which needs loss on learn data
   if(!criterion[StringToCriterionId("Mae_L")] && !criterion[StringToCriterionId("Mse_L")] &&
      !criterion[StringToCriterionId("Mce_L")] && !criterion[StringToCriterionId("N_Err_L")] &&
      !criterion[StringToCriterionId("r_Ins_L")])
      return false;           // loss on learn data needed

   // else
   return true;               // no loss on learn data is needed
}



   //-------------------------------------------------------------------------------------------------------------------
   //
   //    class TParaSetResult encapsulates loss function and some other results for one
   //    parameter set, i.e. the mean and single results obtained in 'nTest' tests on different
   //    learn and test data


//----------------------------------------------------------------------------------------------------------------------
// constructor
TParaSetResult::TParaSetResult(const int& __nTest, const bool*const& _f_crit_to_calc)
{
   // copy
   _nTest      = __nTest;                                            // # tests/repetitions which will be done
   memcpy(f_crit_to_calc,_f_crit_to_calc, nCriterion*sizeof(bool));  // flag array: enabled criterions

   // create arrays for loss/criterion results for each test/repetition
   loss_T = new TLossFunction*[_nTest];         // loss on test data
   loss_L = new TLossFunction*[_nTest];         // loss on learn data

   _nCuboids   = new int      [_nTest];         // # cuboids in learned model
   _nCuboidsK  = new int      [_nTest];         // # cuboids whose mass exceeds threshold
   _nBounds    = new float    [_nTest];         // mean # of active bounds averaged over all cuboids
   rate        = new float    [_nTest];         // average hitrate
   sizeFac     = new float    [_nTest];         // weighting factor regarding model's size
   _pVal       = new float    [_nTest];         // statistics comparing this parameter set to others
   rank        = new float    [_nTest];         //    "
   times_L     = new clock_t  [_nTest];         // learn time
   times_T     = new clock_t  [_nTest];         // test time


   for(int t=0;t<_nTest;t++)                    // initialize single test results with invalid values
   {
      loss_L[t] = loss_T[t] = NULL;
      _pVal    [t] = rank      [t] = sizeFac [t] = rate[t] = -42;
      _nCuboids[t] = _nCuboidsK[t] = _nBounds[t] = -42;
      times_L  [t] = times_T[t] = 0;
   }

   for(int c=0;c<nCriterion;c++)
      mean[c]=std[c] = -42;                     // initialize loss/criterion mean and deviation with invalid values
}


//----------------------------------------------------------------------------------------------------------------------
// destructor
TParaSetResult::~TParaSetResult()
{
   // release memory
   if(loss_T)                       // loss objects
      for(int t=0;t<_nTest;t++)
         if(loss_T[t])
            delete loss_T[t];

   if(loss_L)
      for(int t=0;t<_nTest;t++)
         if(loss_L[t])
            delete loss_L[t];
   delete[] loss_T;
   delete[] loss_L;


   delete[] _pVal;                  // other criterions
   delete[] rank;
   delete[] _nCuboids;
   delete[] rate;
   delete[] _nCuboidsK;
   delete[] _nBounds;
   delete[] sizeFac;
   delete[] times_L;
   delete[] times_T;
}


//----------------------------------------------------------------------------------------------------------------------
// start, stop and get time needed to learn or to test
void TParaSetResult::StartClock(const bool& test, const int& testId){                              // start timer
   if(test) times_T[testId] = clock();
   else     times_L[testId] = clock(); };


void TParaSetResult::StopClock(const bool& test, const int& testId){                               // stop timer
   if(test) times_T[testId] = clock()-times_T[testId];
   else     times_L[testId] = clock()-times_L[testId];}


clock_t& TParaSetResult::GetClock(const bool& test, const int& testId/*=-1*/) {        // get time used
   if(testId==-1)
      if(test) return time_T;
      else     return time_L;
   else
      if(test) return times_T[testId];
      else     return times_L[testId];};



//----------------------------------------------------------------------------------------------------------------------
// access function: return specified criterion's value, either mean or value of 'testId'-th test/repetition
float TParaSetResult::Result(const int& critId, const int& testId/*=-1*/)
{
   float r_value=-42;

   // a) return criterion mean value estimated over all tests/repetitions
   if(testId==-1)
   {
      CheckCriterion(critId);                   // check if mean of specified criterion should have been calculated
      r_value = mean[critId];
      CheckValue(r_value, critId, true);        // check if mean really was calculated
      return r_value;                           // return
   }


   // b) return criterion result for 'testId'-th test
   CheckId(testId);                             // check if value of 'testId' is out of range
   switch(critId)
   {
      case 0  : if(loss_T[testId]) r_value = loss_T[testId]->Mae();         else Error(); break;   // Mae test
      case 1  : if(loss_L[testId]) r_value = loss_L[testId]->Mae();         else Error(); break;   //     learn
      case 2  : if(loss_T[testId]) r_value = loss_T[testId]->Mse();         else Error(); break;   // Mse test
      case 3  : if(loss_L[testId]) r_value = loss_L[testId]->Mse();         else Error(); break;   //     learn
      case 4  : if(loss_T[testId]) r_value = loss_T[testId]->nErr();        else Error(); break;   // # misclassifications on test data

      case 5  : if(loss_L[testId]) r_value = loss_L[testId]->nErr();        else Error(); break;   // same on learn data
      case 6  : if(loss_T[testId]) r_value = loss_T[testId]->Mce();         else Error(); break;   // Mce test
      case 7  : if(loss_L[testId]) r_value = loss_L[testId]->Mce();         else Error(); break;   //     learn
      case 8  :                    r_value = _nCuboids[testId];                           break;   // # cuboids
      case 9  :                    r_value = _nCuboidsK[testId];                          break;   // # cuboids whose mass exceeds a specified threshold

      case 10 :                    r_value = _nBounds[testId];                            break;   // average # of active bounds in each cuboid
      case 11 :                    r_value = sizeFac[testId];                             break;
      case 12 : if(loss_T[testId]) r_value = loss_T[testId]->InsideRatio(); else Error(); break;   // ratio of inside predictions on test data
      case 13 : if(loss_L[testId]) r_value = loss_L[testId]->InsideRatio(); else Error(); break;   // same for learn data
      case 14 : if(loss_T[testId]) r_value = loss_T[testId]->MceIns();      else Error(); break;   // Mce test inside prediction

      case 15 : if(loss_T[testId]) r_value = loss_T[testId]->MceOut();      else Error(); break;   //     test outside prediction
      case 16 : if(loss_T[testId]) r_value = loss_T[testId]->MaeIns();      else Error(); break;   // Mae test inside prediction
      case 17 : if(loss_T[testId]) r_value = loss_T[testId]->MaeOut();      else Error(); break;   //     test outside prediction
      case 18 :                    r_value = _pVal[testId];                               break;   // statistics comparing this parameter set to the others
      case 19 :                    r_value = rank[testId];                                break;   //    "

      case 20 :                    r_value = rate[testId];                                break;   // average hitrate

      default : Error();                                                                           // error: invalid criterion Id
   }

   CheckValue(r_value, critId);     // check return value
   return r_value;
}


//----------------------------------------------------------------------------------------------------------------------
// calculate mean and deviation of the specified error criteria (except 'pVal' and 'Rank') over all tests/repetitions
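// For each enabled criterion i the sample statistics over the 'nTest' repetitions are (as implemented below):
//    mean[i] = (1/nTest) * sum_t Result(i,t)
//    std[i]  = sqrt( sum_t (Result(i,t) - mean[i])^2 / (nTest-1) )        (std[i] stays 0 if nTest==1)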
void TParaSetResult::Calculate()
{
   // a) mean criterion values over all tests
   for(int i=0;i<nCriterion;i++)                      // note: skip 'pVal' and 'Rank'
      if(f_crit_to_calc[i] && i!=StringToCriterionId("Rank") && i!=StringToCriterionId("pVal"))
      {
         mean[i]=0;
         for(int t=0;t<_nTest;t++)
            mean[i] += Result(i, t);
         mean[i]/=_nTest;
      }


   // b) deviation over all tests
   for(int i=0;i<nCriterion;i++)                      // note: skip 'pVal' and 'Rank'
      if(f_crit_to_calc[i] && i!=StringToCriterionId("Rank") && i!=StringToCriterionId("pVal"))
      {
         std[i]=0;
         for(int t=0;t<_nTest;t++)
         {
            float dif = Result(i, t)-mean[i];
            std[i] += dif*dif;
         }
         if(_nTest>1)                                 // prevent division by zero
            std[i] = sqrt(std[i]/(_nTest-1));
      }


   // c) mean times needed to learn and test
   time_L=0;
   for(int t=0;t<_nTest;t++)
      time_L += times_L[t];
   time_L/=_nTest;                  // learn time
   time_T=0;
   for(int t=0;t<_nTest;t++)
      time_T += times_T[t];
   time_T/=_nTest;                  // test time
}


//----------------------------------------------------------------------------------------------------------------------
// calculate mean and deviation of 'Rank' and 'pVal' over all tests/repetitions
// note: the single 'Rank' and 'pVal' values must have been calculated/set elsewhere
void TParaSetResult::Calculate2()
{
   // a) mean criterion values of rank and pVal over all tests
   for(int i=0;i<nCriterion;i++)
      if(f_crit_to_calc[i] && (i==StringToCriterionId("Rank") || i==StringToCriterionId("pVal")))
      {
         mean[i]=0;
         for(int t=0;t<_nTest;t++)
            mean[i] += Result(i, t);
         mean[i]/=_nTest;
      }


   // b) deviation of rank and pVal over all tests
   for(int i=0;i<nCriterion;i++)
      if(f_crit_to_calc[i] && (i==StringToCriterionId("Rank") || i==StringToCriterionId("pVal")))
      {
         std[i]=0;
         for(int t=0;t<_nTest;t++)
         {
            float dif = Result(i, t)-mean[i];
            std[i] += dif*dif;
         }
         if(_nTest>1)                                 // prevent division by zero
            std[i] = sqrt(std[i]/(_nTest-1));
      }
}


//----------------------------------------------------------------------------------------------------------------------
// write description for specified results
void TParaSetResult::WriteResultDescriptions(ofstream& file, const bool*const& f_Criterion,
    const bool& f_ShowDev/*=false*/, const bool& f_Objective/*=false*/)
{
   char szText[STS];

   if(f_Objective)                                                      // a) write tuning objective name
      file << "  Objective";


   for(int i=0;i<nCriterion;i++)                                        // b) criterion names
      if(f_Criterion[i])                                                // if criterion is enabled
      {
         if(f_ShowDev)                                                        // if deviations are shown ...
            sprintf(szText, " %*s %*s", WIDTH, szCriterion[i], WIDTH, "Dev"); // write criterion name and 'Dev'
         else
            sprintf(szText, " %*s", WIDTH, szCriterion[i]);                   // else write just the criterion name
         file << szText;
      }
   file << "    T_LP      T_T" << endl;                                       // c) write learn/prune and test times
}


//----------------------------------------------------------------------------------------------------------------------
// write deviation for specified criterion
void TParaSetResult::WriteDeviation(const int& criterion, ofstream& file)
{
   if(f_Percentage[criterion])
      file << " " << WritePercentage(Std(criterion), WIDTH-2, max(precision[criterion],1)) <<" %";// write as percentage
   else
      file << " " << ValueToTextFP(Std(criterion), WIDTH, max(precision[criterion], 1));            // write
}


//----------------------------------------------------------------------------------------------------------------------
// write value of specified criterion, either mean or value of t-th test
const char* TParaSetResult::ResultToText(const int& criterion,const int& width,const bool& f_Weight, const int&t/*=-1*/)
{
   static char szText[STS];
   float result = Result(criterion, t);                              // get result value
   if(f_Weight)
      if(criterion==StringToCriterionId("pVal"))                     // weight with model size factor if specified
         result /= Result(StringToCriterionId("SizeFac"), t);        // note: criterion 'pVal' needs to be divided
      else
         result *= Result(StringToCriterionId("SizeFac"), t);

   // write value to text
   if(f_Percentage[criterion])                           // write as percentage
      sprintf(szText, " %s %%", WritePercentage(result, width-3, max(precision[criterion], (int)(t<0))));
   else
      strcpy(szText, ValueToTextFP(result, width, max(precision[criterion], (int) (t<0)) ));   // write as float

   return szText;
}


//----------------------------------------------------------------------------------------------------------------------
// write specified results and learn- and test time
void TParaSetResult::Write(ofstream& file, const bool f_Criterion[], const bool& f_ShowDev, const int& t /*=-1*/)
{
   // a) write specified loss function results
   for(int i=0;i<nCriterion;i++)
      if(f_Criterion[i])                                       // if criterion is enabled ...
      {
         file << " " << ResultToText(i, WIDTH, false, t);      // write result/loss (not weighted) and ...
         if(t==-1 && f_ShowDev)                                // ... if specified and only for mean values (t==-1) ...
            WriteDeviation(i, file);                           // ... write deviation
      }


   // b) times
   // WARNING: Do NOT put the two calls to WriteTime() in one statement! WriteTime() writes to a static local
   // variable and both calls get evaluated before(!) the data is written to the stream.
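   // e.g. "file << WriteTime(GetClock(false,t)) << ' ' << WriteTime(GetClock(true,t));" would stream the contents
   // of that single static buffer twice, i.e. print the same (last evaluated) time for both columns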
   file << WriteTime(GetClock(false, t)) << " ";
   file << WriteTime(GetClock(true, t)) << endl;
}


//----------------------------------------------------------------------------------------------------------------------
// checks: throw error if 'testId' is out of range or if loss object is set twice etc.
void TParaSetResult::CheckId(const int& testId){
   IfTrueThrowTypeA(testId<0 || testId>=_nTest, "One of the result functions was called with illegal value of 'testId'!"
                     , "TParaSetResult::CheckId", szModule);}


void TParaSetResult::Error(){
   ThrowTypeA("Requested loss object wasn't set or criterion Id out of range!", "TParaSetResult::Result", szModule);}

void TParaSetResult::SetLossObjectErrorIfTrue(const bool f_Throw){
   IfTrueThrowTypeA(f_Throw, "Loss object was already set!", "TParaSetResult::SetLossObject", szModule);}



void TParaSetResult::CheckCriterion(const int& criterion)
{
   IfTrueThrowTypeA(criterion<0 || criterion>=nCriterion, "Specified criterion Id is out of range!"
                     , "TParaSetResult::Result", szModule);

   if(!f_crit_to_calc[criterion])
   {
      char szText[STS];          // compose error text
      sprintf(szText, "Criterion '%s' mean was not calculated so far!", CriterionIdToString(criterion));
      ThrowTypeA(szText, "TParaSetResult::Result", szModule);
   }
}

void TParaSetResult::CheckValue(const float& value, const int& criterion, const bool& f_mean/*=false*/)
{
   if(value!=-42)
      return;

   // else: compose error text
   char szText[STS];
   if(f_mean)
      sprintf(szText, "Mean of criterion '%s' not calculated so far!", CriterionIdToString(criterion));
   else
      sprintf(szText, "Criterion '%s' not initialized so far!", CriterionIdToString(criterion));

   ThrowTypeA(szText, "TParaSetResult::Result", szModule);     // and throw
}




   //-------------------------------------------------------------------------------------------------------------------
   //
   //    class TParaSetResults encapsulates several TParaSetResult objects, i.e. it
   //    stores the results of 'nSets' different parameter sets


//----------------------------------------------------------------------------------------------------------------------   
// checks: throw error if 'setId' is out of range
void TParaSetResults::CheckId(const int& setId){
   IfTrueThrowTypeA(setId<0 || setId>=_nSets, "Value of 'setId' out of range!", "TParaSetResults::Get", szModule);}


//----------------------------------------------------------------------------------------------------------------------
// constructor
TParaSetResults::TParaSetResults(const int& __nSets, const int& __nTest, const bool& _f_Regression
                                 , const bool*const& _f_CritToCalc)
{
   // copy
   _nSets       = __nSets;                                       // # different parameter sets
   _nTest       = __nTest;                                       // # test, i.e. repetitions done for each parameter set
   f_Regression = _f_Regression;                                 // flag: indicates regression task
   memcpy(f_CritToCalc, _f_CritToCalc, nCriterion*sizeof(bool));

   res = new TParaSetResult*[_nSets];                            // create one result object for each parameter set
   for(int r=0;r<_nSets;r++)
      res[r] = new TParaSetResult(_nTest, f_CritToCalc);
}


//----------------------------------------------------------------------------------------------------------------------
// destructor
TParaSetResults::~TParaSetResults()
{
   for(int r=0;r<_nSets;r++)     // release result objects
      delete res[r];
   delete[] res;
}


//----------------------------------------------------------------------------------------------------------------------
// used by tuning routine: calculate 'pVal' and 'Rank' for each single repetition
void TParaSetResults::CalculateRankAndPVal(const int& ranking)
{
   // a) compare the different parameter sets on each(!) test/repetition
   for(int t=0;t<_nTest;t++)
   {
      // get parameter set Ids of the t-th test sorted by the ranking criterion and not(!) weighted by model size factor
      TSorter* sorter = SortedId(ranking, false, t);

      #ifndef RELEASE
      TParaSetResult* winner = res[sorter->GetId(0)];                   // get pointer to 'winner'
      const float* y_err = winner->GetLossObjectTest(t)->Y_Err();       // get winner's error vector
      const int nTup = winner->GetLossObjectTest(t)->nTup();
      #endif

      // calculate and sum up ranks
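      // example: sorted losses 0.10, 0.10, 0.25, 0.30 yield ranks 1, 1, 2, 3 - tied losses share a rank and the
      //          rank is only incremented when the loss gets worse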
      if(f_CritToCalc[StringToCriterionId("Rank")])
      {
         float loss=0;                    // ini
         int   rank=0;

         for(int r=0;r<_nSets;r++)        // over all sets
         {
            if(loss!= sorter->GetA(r))                // increment rank if loss is worse
               rank++;
            loss = sorter->GetA(r);
            res[sorter->GetId(r)]->Rank(t) = rank;    // set rank
         }
      }
      delete sorter; // release


      // if error vector exists
      // sum up P or Z statistics  -  note: values are negative because the tuning routine selects the lowest value
      if(f_CritToCalc[StringToCriterionId("pVal")])
         #ifdef RELEASE
         ThrowTypeA("Criterion 'pVal' should not be set in GNU GPL version!", "TParaSetResults::CalculateRankAndPVal"
                        , szModule);
         #else
         if(y_err)
            for(int r=0;r<_nSets;r++)
            {
               if(res[r]->IsInitialized())
                  if(f_Regression)
                     // t-test
                     res[r]->pVal(t) = -ptTest(y_err, res[r]->GetLossObjectTest(t)->Y_Err(), nTup);
                  else
                     // Z-test for two binomial proportions
                     res[r]->pVal(t) = -pZTest(y_err, res[r]->GetLossObjectTest(t)->Y_Err(), nTup);
               else
                  res[r]->pVal(t) = 0;    // worst value is zero
            }
          else
            ThrowTypeA("You've specified the p-Values to be calculated but the loss objects does not contain any error vectors!"
                           , "TParaSetResults::CalculateRankAndPVal", szModule);
         #endif
   }

   // b) calculate mean and deviation of 'Rank' and 'pVal' for each parameter set
   for(int r=0;r<_nSets;r++)
      res[r]->Calculate2();
}


//----------------------------------------------------------------------------------------------------------------------
// return Id of parameter set with the best results regarding specified tuning objective
int TParaSetResults::FindBest(const int& objective)
{
   // get parameter set Ids sorted by tuning objective and weighted by model size factor
   TSorter* sorter=SortedId(objective, true);

   int ans = sorter->GetId(0);                     // get Id of set with lowest criterion value
   delete sorter;
   return ans;
}


//----------------------------------------------------------------------------------------------------------------------
// get parameter set Ids sorted regarding the specified criterion (optionally weighted by the model's size)
// note: compare either mean values or t-th test run's values
TSorter* /*cr*/ TParaSetResults::SortedId(const int& criterion, const bool& f_weight, const int& testId/*=-1*/)
{
   TSorter* sorter = new TSorter(_nSets);                                                 // create

   for(int r=0;r<_nSets;r++)                                                              // initialize

      if(res[r]->IsInitialized())
      {
         sorter->GetA(r) = res[r]->Result(criterion, testId);                                // 1st criterion as specified
         if(f_weight)
            if(criterion==StringToCriterionId("pVal"))            // weight by factor regarding model's size if specified
               sorter->GetA(r) /= res[r]->Result(StringToCriterionId("SizeFac"), testId);
            else
               sorter->GetA(r) *= res[r]->Result(StringToCriterionId("SizeFac"), testId);

         // 2nd criterion is model size (# cuboids * # active variable bounds)
         sorter->GetB(r)=res[r]->Result(StringToCriterionId("K'"))*res[r]->Result(StringToCriterionId("VarPerCub"),testId);
      }
      else
         if(criterion==StringToCriterionId("pVal"))
            sorter->GetA(r) = 0;                               // worst value is zero
         else
            sorter->GetA(r) = MAXFLOAT;                        // worst value is infinity

   sorter->SortAsc();

   return sorter;
}


//----------------------------------------------------------------------------------------------------------------------
// write description (column headers) and write parameters and results for all parameter sets (sorted by tuning
// objective)      note: used to write tuning results
void TParaSetResults::Write(ofstream& file, const bool*const& f_Criterion, const int& objective
                           , const TParaSetList*const& para, const bool& f_ShowDev)
{
   // a) write parameter and result description
   TParaSet::WriteDescription(file, f_Regression);                                  // parameter description
   TParaSetResult::WriteResultDescriptions(file, f_Criterion, f_ShowDev, true);     // results        "


   // b) sort parameter sets regarding specified objective weighted by model size factors
   const TSorter* sorter = SortedId(objective, true);


   // c) write parameters, objective (weighted) and loss function results
   for(int r=0;r<_nSets;r++)
   {
      para->Get(sorter->GetId(r)).WriteParameters(file, f_Regression);                // parameters

      if(res[sorter->GetId(r)]->IsInitialized())
      {
         file << " " << res[sorter->GetId(r)]->ResultToText(objective, WIDTH, true);  // tuning objective weighted
                                                                                      // by model size factor
         res[sorter->GetId(r)]->Write(file, f_Criterion, f_ShowDev);                  // results
      }
      else
         file << setw(WIDTH) << "skipped" << endl;
   }
   delete sorter;    // release
}