Index: trunk/MagicSoft/Mars/Changelog
===================================================================
--- trunk/MagicSoft/Mars/Changelog	(revision 7395)
+++ trunk/MagicSoft/Mars/Changelog	(revision 7396)
@@ -18,4 +18,30 @@
 
                                                  -*-*- END OF LINE -*-*-
+
+ 2005/11/14 Thomas Bretz
+
+   * MRFEnergyEst.[h,cc]:
+     - changed to allow for new regression method
+
+   * MRanForest.[h,cc]:
+     - taken TH's implementation of the RF regression 
+       + updated includes
+       + added fUserVal
+       + removed ReadRF, WriteRF -- obsolete
+
+   * MRanForestGrow.[h,cc]:
+     - took out setting up the growing of the forest from this task
+       (currently it is done by MRanForest directly)
+     - adapted to changes in other classes as TH did.
+
+   * MRanTree.[h,cc]:
+     - changes taken from TH
+       + added copy-constructor
 +       + updated to allow regression
+
+   * Makefile, RanForestLinkDef.h:
+     - took out MRanForestCalc and MRanForestFill temporarily
+
+
 
  2005/11/12 Daniela Dorner
Index: trunk/MagicSoft/Mars/NEWS
===================================================================
--- trunk/MagicSoft/Mars/NEWS	(revision 7395)
+++ trunk/MagicSoft/Mars/NEWS	(revision 7396)
@@ -2,4 +2,7 @@
 
  *** Version  <cvs>
+
 +   - general: Updated the random forest classes to also support the
 +     regression method implemented by Thomas H.
 
    - ganymed: Implemented two new options which allow
Index: trunk/MagicSoft/Mars/macros/optim/rfenergyest.C
===================================================================
--- trunk/MagicSoft/Mars/macros/optim/rfenergyest.C	(revision 7396)
+++ trunk/MagicSoft/Mars/macros/optim/rfenergyest.C	(revision 7396)
@@ -0,0 +1,106 @@
+void rfenergyest()
+{
+    MSequence seqtst("~/Software/Mars/mranforest/sequencemc-test.txt");
+    MSequence seqtrn("~/Software/Mars/mranforest/sequencemc-train.txt");
+
+    if (!seqtst.IsValid())
+    {
+        cout << "Test sequence not valid" << endl;
+        return;
+    }
+    if (!seqtrn.IsValid())
+    {
+        cout << "Train sequence not valid" << endl;
+        return;
+    }
+
+    // --------------------- Setup files --------------------
+    MReadMarsFile read("Events");
+    MReadMarsFile read2("Events");
+    read.DisableAutoScheme();
+    read2.DisableAutoScheme();
+
+    MDirIter iter, iter2;
+    seqtrn.SetupDatRuns(iter,  MSequence::kImages, "~/Software/mc");
+    seqtst.SetupDatRuns(iter2, MSequence::kImages, "~/Software/mc");
+
+    read.AddFiles(iter);
+    read2.AddFiles(iter2);
+
+    // ----------------------- Setup RF ----------------------
+    MHMatrix train("Train");
+    train.AddColumn("MHillas.fSize");
+    train.AddColumn("MHillasSrc.fDist");
+    train.AddColumn("MPointingPos.fZd");
+    /*
+     train.AddColumn("MImagePar.fSizeSinglePixels");
+     train.AddColumn("MHillas.GetArea");
+     train.AddColumn("MNewImagePar.fLeakage1");
+     train.AddColumn("MHillasExt.fM3Long*sign(MHillasSrc.fCosDeltaAlpha)");
+     */
+
+    // ------------------------------------------------------------
+
+    // Last column must contain energy
+    train.AddColumn("MMcEvt.fImpact/100");
+    train.AddColumn("MMcEvt.fTelescopeTheta*kRad2Deg");
+    train.AddColumn("MMcEvt.fEnergy");
+
+    MStatusDisplay *d = new MStatusDisplay;
+
+    // ----------------------- Fill Matrix RF ----------------------
+
+    if(gRandom)
+        delete gRandom;
+    gRandom = new TRandom3();
+
+    MTFillMatrix fill;
+    fill.SetDisplay(d);
+    fill.SetDestMatrix1(&train, 10000);//99999999);
+    fill.SetReader(&read);
+
+    if (!fill.Process())
+        return;
+
+    // ------------------------ Train RF --------------------------
+
+    MRFEnergyEst rf;
+    rf.SetDisplay(d);
+    rf.SetFileName("rfenergys.root");
+
+    MBinning b(32, 10, 100000, "BinningEnergyEst", "log");
+    /*
+    if (!rf.TrainMultiRF(train, b.GetEdgesD()))    // classification
+        return;
+
+    if (!rf.TrainSingleRF(train, b.GetEdgesD()))   // classification
+        return;
+    */
+    if (!rf.TrainSingleRF(train))                  // regression
+        return;
+
+
+    gLog.Separator("Test");
+
+    // --------------------- Display result ----------------------
+    MParList  plist;
+    MTaskList tlist;
+    plist.AddToList(&tlist);
+    plist.AddToList(&b);
+
+    MHEnergyEst hist;
+    MFillH fillh(&hist);
+
+    tlist.AddToList(&read2);
+    tlist.AddToList(&rf);
+    tlist.AddToList(&fillh);
+
+    MEvtLoop loop;
+    loop.SetDisplay(d);
+    loop.SetParList(&plist);
+
+    if (!loop.Eventloop())
+        return;
+
+    //d->SaveAsPS("rfenergys.ps");
+}
Index: trunk/MagicSoft/Mars/mranforest/MRFEnergyEst.cc
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRFEnergyEst.cc	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRFEnergyEst.cc	(revision 7396)
@@ -17,4 +17,5 @@
 !
 !   Author(s): Thomas Hengstebeck 2/2005 <mailto:hengsteb@physik.hu-berlin.de>
+!   Author(s): Thomas Bretz 8/2005 <mailto:tbretz@astro.uni-wuerzburg.de>
 !
 !   Copyright: MAGIC Software Development, 2000-2005
@@ -31,12 +32,4 @@
 #include "MRFEnergyEst.h"
 
-#include <TFile.h>
-#include <TList.h>
-
-#include <TH1.h>
-#include <TH2.h>
-#include <TStyle.h>
-#include <TCanvas.h>
-#include <TMath.h>
 #include <TVector.h>
 
@@ -45,27 +38,23 @@
 #include "MLog.h"
 #include "MLogManip.h"
+
+#include "MData.h"
+#include "MDataArray.h"
+
+#include "MRanForest.h"
+#include "MParameters.h"
 
 #include "MParList.h"
 #include "MTaskList.h"
 #include "MEvtLoop.h"
-
-#include "MRanTree.h"
-#include "MRanForest.h"
 #include "MRanForestGrow.h"
 
-#include "MData.h"
-#include "MParameters.h"
-
 ClassImp(MRFEnergyEst);
 
 using namespace std;
 
-static const TString gsDefName  = "MRFEnergyEst";
-static const TString gsDefTitle = "RF for energy estimation";
-
-// --------------------------------------------------------------------------
-//
-//  Default constructor. Set the name and title of this object
-//
+const TString MRFEnergyEst::gsDefName  = "MRFEnergyEst";
+const TString MRFEnergyEst::gsDefTitle = "RF for energy estimation";
+
 MRFEnergyEst::MRFEnergyEst(const char *name, const char *title)
     : fDebug(kFALSE), fData(0), fEnergyEst(0),
@@ -75,4 +64,9 @@
     fName  = name  ? name  : gsDefName.Data();
     fTitle = title ? title : gsDefTitle.Data();
+
+    // FIXME:
+    fNumTrees = 100; //100
+    fNumTry   = 5;   //3
+    fNdSize   = 0;   //1   0 means: in MRanForest estimated best value will be calculated
 }
 
@@ -82,10 +76,5 @@
 }
 
-// --------------------------------------------------------------------------
-//
-// Train the RF with the goven MHMatrix. The last column must contain the
-// True energy.
-//
-Int_t MRFEnergyEst::Train(const MHMatrix &matrixtrain, const TArrayD &grid)
+Int_t MRFEnergyEst::Train(const MHMatrix &matrixtrain, const TArrayD &grid, Int_t ver)
 {
     gLog.Separator("MRFEnergyEst - Train");
@@ -105,11 +94,4 @@
     }
 
-    const Int_t nbins = grid.GetSize()-1;
-    if (nbins<=0)
-    {
-        *fLog << err << "ERROR - Energy grid not vaild... abort." << endl;
-        return kFALSE;
-    }
-
     // rules (= combination of image par) to be used for energy estimation
     TFile fileRF(fFileName, "recreate");
@@ -122,68 +104,80 @@
     const Int_t nobs = 3; // Number of obsolete columns
 
+    MDataArray &dcol = *matrixtrain.GetColumns();
+
     MDataArray usedrules;
-    for(Int_t i=0; i<ncols-nobs; i++) // -3 is important!!!
-        usedrules.AddEntry((*matrixtrain.GetColumns())[i].GetRule());
-
-    // training of RF for each energy bin
+    for (Int_t i=0; i<ncols-nobs; i++) // -3 is important!!!
+        usedrules.AddEntry(dcol[i].GetRule());
+
+    MDataArray rules(usedrules);
+    rules.AddEntry(ver<2?"Classification":dcol[ncols-1].GetRule());
+
+    // prepare matrix for current energy bin
+    TMatrix mat(matrixtrain.GetM());
+
+    // last column must be removed (true energy col.)
+    mat.ResizeTo(nrows, ncols-nobs+1);
+
+    if (fDebug)
+        gLog.SetNullOutput(kTRUE);
+
+    const Int_t nbins = ver>0 ? 1 : grid.GetSize()-1;
     for (Int_t ie=0; ie<nbins; ie++)
     {
-        TMatrix mat1(nrows, ncols);
-        TMatrix mat0(nrows, ncols);
-
-        // prepare matrix for current energy bin
-        Int_t irow1=0;
-        Int_t irow0=0;
-
-        const TMatrix &m = matrixtrain.GetM();
-        for (Int_t j=0; j<nrows; j++)
+        switch (ver)
         {
-            const Double_t energy = m(j,ncols-1);
-
-            if (energy>grid[ie] && energy<=grid[ie+1])
-                TMatrixFRow(mat1, irow1++) = TMatrixFRow_const(m,j);
-            else
-                TMatrixFRow(mat0, irow0++) = TMatrixFRow_const(m,j);
+        case 0: // Replace Energy Grid by classification
+            {
+                Int_t irows=0;
+                for (Int_t j=0; j<nrows; j++)
+                {
+                    const Double_t energy = matrixtrain.GetM()(j,ncols-1);
+                    const Bool_t   inside = energy>grid[ie] && energy<=grid[ie+1];
+
+                    mat(j, ncols-nobs) = inside ? 1 : 0;
+
+                    if (inside)
+                        irows++;
+                }
+                if (irows==0)
+                    *fLog << warn << "WARNING - Skipping";
+                else
+                    *fLog << inf << "Training RF for";
+
+                *fLog << " energy bin " << ie << " (" << grid[ie] << ", " << grid[ie+1] << ") " << irows << "/" << nrows << endl;
+
+                if (irows==0)
+                    continue;
+            }
+            break;
+
+        case 1: // Use Energy as classifier
+        case 2:
+            for (Int_t j=0; j<nrows; j++)
+                mat(j, ncols-nobs) = matrixtrain.GetM()(j,ncols-1);
+            break;
         }
 
-        const Bool_t invalid = irow1==0 || irow0==0;
-
-        if (invalid)
-            *fLog << warn << "WARNING - Skipping";
-        else
-            *fLog << inf << "Training RF for";
-
-        *fLog << " energy bin " << ie << " (" << grid[ie] << ", " << grid[ie+1] << ") " << irow0 << " " << irow1 << endl;
-
-        if (invalid)
-            continue;
-
-        if (fDebug)
-            gLog.SetNullOutput(kTRUE);
-
-        // last column must be removed (true energy col.)
-        mat1.ResizeTo(irow1, ncols-nobs);
-        mat0.ResizeTo(irow0, ncols-nobs);
-
-        // create MHMatrix as input for RF
-        MHMatrix matrix1(mat1, "MatrixHadrons");
-        MHMatrix matrix0(mat0, "MatrixGammas");
-
-        // training of RF
+        MHMatrix matrix(mat, &rules, "MatrixTrain");
+
+        MParList plist;
         MTaskList tlist;
-        MParList plist;
         plist.AddToList(&tlist);
-        plist.AddToList(&matrix0);
-        plist.AddToList(&matrix1);
+        plist.AddToList(&matrix);
+
+        MRanForest rf;
+        rf.SetNumTrees(fNumTrees);
+        rf.SetNumTry(fNumTry);
+        rf.SetNdSize(fNdSize);
+        rf.SetClassify(ver<2 ? 1 : 0);
+        if (ver==1)
+            rf.SetGrid(grid);
+
+        plist.AddToList(&rf);
 
         MRanForestGrow rfgrow;
-        rfgrow.SetNumTrees(fNumTrees); // number of trees
-        rfgrow.SetNumTry(fNumTry);     // number of trials in random split selection
-        rfgrow.SetNdSize(fNdSize);     // limit for nodesize
-
         tlist.AddToList(&rfgrow);
-    
+
         MEvtLoop evtloop;
-        evtloop.SetDisplay(fDisplay);
         evtloop.SetParList(&plist);
 
@@ -194,11 +188,15 @@
             gLog.SetNullOutput(kFALSE);
 
-        // Calculate bin center
-        const Double_t E = (TMath::Log10(grid[ie])+TMath::Log10(grid[ie+1]))/2;
-
-        // save whole forest
-        MRanForest *forest=(MRanForest*)plist.FindObject("MRanForest");
-        forest->SetUserVal(E);
-        forest->Write(Form("%.10f", E));
+        if (ver==0)
+        {
+            // Calculate bin center
+            const Double_t E = (TMath::Log10(grid[ie])+TMath::Log10(grid[ie+1]))/2;
+
+            // save whole forest
+            rf.SetUserVal(E);
+            rf.SetName(Form("%.10f", E));
+        }
+
+        rf.Write();
     }
 
@@ -224,5 +222,5 @@
     while ((o=Next()))
     {
-        MRanForest *forest;
+        MRanForest *forest=0;
         fileRF.GetObject(o->GetName(), forest);
         if (!forest)
@@ -234,4 +232,6 @@
     }
 
+    // Maybe fEForests[0].fRules could be used instead?
+
     if (fData->Read("rules")<=0)
     {
@@ -249,8 +249,12 @@
         return kFALSE;
 
+    cout << "MDataArray" << endl;
+
     fData = (MDataArray*)plist->FindCreateObj("MDataArray");
     if (!fData)
         return kFALSE;
 
+    cout << "ReadForests" << endl;
+
     if (!ReadForests(*plist))
     {
@@ -275,13 +279,8 @@
 }
 
-// --------------------------------------------------------------------------
-//
-//
 #include <TGraph.h>
 #include <TF1.h>
 Int_t MRFEnergyEst::Process()
 {
-    static TF1 f1("f1", "gaus");
-
     TVector event;
     if (fTestMatrix)
@@ -290,4 +289,17 @@
         *fData >> event;
 
+    // --------------- Single Tree RF -------------------
+    if (fEForests.GetEntries()==1)
+    {
+        const MRanForest *rf = (MRanForest*)fEForests[0];
+        fEnergyEst->SetVal(rf->CalcHadroness(event));
+        fEnergyEst->SetReadyToSave();
+
+        return kTRUE;
+    }
+
+    // --------------- Multi Tree RF -------------------
+    static TF1 f1("f1", "gaus");
+
     Double_t sume = 0;
     Double_t sumh = 0;
@@ -308,7 +320,10 @@
         const Double_t h = rf->CalcHadroness(event);
         const Double_t e = rf->GetUserVal();
+
         g.SetPoint(g.GetN(), e, h);
+
         sume += e*h;
         sumh += h;
+
         if (h>maxh)
         {
@@ -337,6 +352,6 @@
         fEnergyEst->SetVal(pow(10, f1.GetParameter(1)));
         break;
-
-    }
+    }
+
     fEnergyEst->SetReadyToSave();
 
Index: trunk/MagicSoft/Mars/mranforest/MRFEnergyEst.h
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRFEnergyEst.h	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRFEnergyEst.h	(revision 7396)
@@ -9,11 +9,12 @@
 #include <TObjArray.h>
 #endif
+
 #ifndef ROOT_TArrayD
 #include <TArrayD.h>
 #endif
 
-class MHMatrix;
 class MDataArray;
 class MParameterD;
+class MHMatrix;
 
 class MRFEnergyEst : public MTask
@@ -26,5 +27,9 @@
         kFit
     };
+
 private:
+    static const TString gsDefName;   //! Default Name
+    static const TString gsDefTitle;  //! Default Title
+
     Bool_t       fDebug;      // Debugging of eventloop while training on/off
 
@@ -39,17 +44,21 @@
     Int_t        fNdSize;     //! Training parameters
 
-    MHMatrix    *fTestMatrix; //! Test Matrix
+    MHMatrix    *fTestMatrix; //! Test Matrix used in Process (together with MMatrixLoop)
 
     EstimationMode_t fEstimationMode;
+
+private:
+    // MTask
+    Int_t PreProcess(MParList *plist);
+    Int_t Process();
 
     // MRFEnergyEst
     Int_t ReadForests(MParList &plist);
 
-    // MTask
-    Int_t PreProcess(MParList *plist);
-    Int_t Process();
-
     // MParContainer
     Int_t ReadEnv(const TEnv &env, TString prefix, Bool_t print);
+
+    // Train Interface
+    Int_t Train(const MHMatrix &n, const TArrayD &grid, Int_t ver=2);
 
 public:
@@ -58,15 +67,22 @@
 
     // Setter for estimation
-    void  SetFileName(TString str)     { fFileName = str; }
-    void  SetEstimationMode(EstimationMode_t op) { fEstimationMode = op; }
+    void SetFileName(TString filename)          { fFileName = filename; }
+    void SetEstimationMode(EstimationMode_t op) { fEstimationMode = op; }
 
     // Setter for training
-    void  SetNumTrees(Int_t n=-1)      { fNumTrees = n; }
-    void  SetNdSize(Int_t n=-1)        { fNdSize   = n; }
-    void  SetNumTry(Int_t n=-1)        { fNumTry   = n; }
-    void  SetDebug(Bool_t b=kTRUE)     { fDebug = b; }
+    void SetNumTrees(UShort_t n=100) { fNumTrees = n; }
+    void SetNdSize(UShort_t n=5)     { fNdSize   = n; }
+    void SetNumTry(UShort_t n=0)     { fNumTry   = n; }
+    void SetDebug(Bool_t b=kTRUE)    { fDebug = b; }
 
     // Train Interface
-    Int_t Train(const MHMatrix &n, const TArrayD &grid);
+    Int_t TrainMultiRF(const MHMatrix &n, const TArrayD &grid)
+    {
+        return Train(n, grid, 0);
+    }
+    Int_t TrainSingleRF(const MHMatrix &n, const TArrayD &grid=TArrayD())
+    {
+        return Train(n, grid, grid.GetSize()==0 ? 2 : 1);
+    }
 
     // Test Interface
Index: trunk/MagicSoft/Mars/mranforest/MRanForest.cc
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRanForest.cc	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRanForest.cc	(revision 7396)
@@ -16,7 +16,7 @@
 !
 !
-!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@alwa02.physik.uni-siegen.de>
+!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@physik.hu-berlin.de>
 !
-!   Copyright: MAGIC Software Development, 2000-2003
+!   Copyright: MAGIC Software Development, 2000-2005
 !
 !
@@ -39,15 +39,15 @@
 // split selection (which is subject to MRanTree::GrowTree())
 //
-// Version 2:
-//  + fUserVal
-//
 /////////////////////////////////////////////////////////////////////////////
 #include "MRanForest.h"
 
-#include <TMatrix.h>
-#include <TRandom3.h>
+#include <TVector.h>
+#include <TRandom.h>
 
 #include "MHMatrix.h"
 #include "MRanTree.h"
+#include "MData.h"
+#include "MDataArray.h"
+#include "MParList.h"
 
 #include "MLog.h"
@@ -62,5 +62,5 @@
 // Default constructor.
 //
-MRanForest::MRanForest(const char *name, const char *title) : fNumTrees(100), fRanTree(NULL),fUsePriors(kFALSE), fUserVal(-1)
+MRanForest::MRanForest(const char *name, const char *title) : fClassify(1), fNumTrees(100), fNumTry(0), fNdSize(1), fRanTree(NULL), fUserVal(-1)
 {
     fName  = name  ? name  : "MRanForest";
@@ -71,11 +71,63 @@
 }
 
+MRanForest::MRanForest(const MRanForest &rf)
+{
+    // Copy constructor
+    fName  = rf.fName;
+    fTitle = rf.fTitle;
+
+    fClassify = rf.fClassify;
+    fNumTrees = rf.fNumTrees;
+    fNumTry   = rf.fNumTry;
+    fNdSize   = rf.fNdSize;
+    fTreeNo   = rf.fTreeNo;
+    fRanTree  = NULL;
+
+    fRules=new MDataArray();
+    fRules->Reset();
+
+    MDataArray *newrules=rf.fRules;
+
+    for(Int_t i=0;i<newrules->GetNumEntries();i++)
+    {
+        MData &data=(*newrules)[i];
+        fRules->AddEntry(data.GetRule());
+    }
+
+    // trees
+    fForest=new TObjArray();
+    fForest->SetOwner(kTRUE);
+
+    TObjArray *newforest=rf.fForest;
+    for(Int_t i=0;i<newforest->GetEntries();i++)
+    {
+        MRanTree *rantree=(MRanTree*)newforest->At(i);
+
+        MRanTree *newtree=new MRanTree(*rantree);
+        fForest->Add(newtree);
+    }
+
+    fHadTrue  = rf.fHadTrue;
+    fHadEst   = rf.fHadEst;
+    fDataSort = rf.fDataSort;
+    fDataRang = rf.fDataRang;
+    fClassPop = rf.fClassPop;
+    fWeight   = rf.fWeight;
+    fTreeHad  = rf.fTreeHad;
+
+    fNTimesOutBag = rf.fNTimesOutBag;
+
+}
+
 // --------------------------------------------------------------------------
-//
 // Destructor. 
-//
 MRanForest::~MRanForest()
 {
     delete fForest;
+}
+
+MRanTree *MRanForest::GetTree(Int_t i)
+{
+    return (MRanTree*)(fForest->At(i));
 }
 
@@ -88,22 +140,119 @@
 }
 
-void MRanForest::SetPriors(Float_t prior_had, Float_t prior_gam)
-{
-    const Float_t sum=prior_gam+prior_had;
-
-    prior_gam/=sum;
-    prior_had/=sum;
-
-    fPrior[0]=prior_had;
-    fPrior[1]=prior_gam;
-
-    fUsePriors=kTRUE;
+void MRanForest::SetNumTry(Int_t n)
+{
+    fNumTry=TMath::Max(n,0);
+}
+
+void MRanForest::SetNdSize(Int_t n)
+{
+    fNdSize=TMath::Max(n,1);
+}
+
+void MRanForest::SetWeights(const TArrayF &weights)
+{
+    const int n=weights.GetSize();
+    fWeight.Set(n);
+    fWeight=weights;
+
+    return;
+}
+
+void MRanForest::SetGrid(const TArrayD &grid)
+{
+    const int n=grid.GetSize();
+
+    for(int i=0;i<n-1;i++)
+        if(grid[i]>=grid[i+1])
+        {
+            *fLog<<inf<<"Grid points must be in increasing order! Ignoring grid."<<endl;
+            return;
+        }
+
+    fGrid=grid;
+
+    //*fLog<<inf<<"Following "<<n<<" grid points are used:"<<endl;
+    //for(int i=0;i<n;i++)
+    //    *fLog<<inf<<" "<<i<<") "<<fGrid[i]<<endl;
+
+    return;
 }
 
 Int_t MRanForest::GetNumDim() const
 {
-    return fGammas ? fGammas->GetM().GetNcols() : 0;
-}
-
+    return fMatrix ? fMatrix->GetNcols() : 0;
+}
+
+Int_t MRanForest::GetNumData() const
+{
+    return fMatrix ? fMatrix->GetNrows() : 0;
+}
+
+Int_t MRanForest::GetNclass() const
+{
+    int maxidx = TMath::LocMax(fClass.GetSize(),fClass.GetArray());
+
+    return int(fClass[maxidx])+1;
+}
+
+void MRanForest::PrepareClasses()
+{
+    const int numdata=fHadTrue.GetSize();
+
+    if(fGrid.GetSize()>0)
+    {
+        // classes given by grid
+        const int ngrid=fGrid.GetSize();
+
+        for(int j=0;j<numdata;j++)
+        {
+            // Array is supposed to be sorted prior to this call.
+            // If match is found, function returns position of element.
+            // If no match found, function gives nearest element smaller
+            // than value.
+            int k=TMath::BinarySearch(ngrid, fGrid.GetArray(), fHadTrue[j]);
+
+            fClass[j]   = k;
+        }
+
+        int minidx = TMath::LocMin(fClass.GetSize(),fClass.GetArray());
+        int min = fClass[minidx];
+        if(min!=0) for(int j=0;j<numdata;j++)fClass[j]-=min;
+
+    }else{
+        // classes directly given
+        for (Int_t j=0;j<numdata;j++)
+            fClass[j] = int(fHadTrue[j]+0.5);
+    }
+
+    return;
+}
+
+/*
+Bool_t MRanForest::PreProcess(MParList *plist)
+{
+    if (!fRules)
+    {
+        *fLog << err << dbginf << "MDataArray with rules not initialized... aborting." << endl;
+        return kFALSE;
+    }
+
+    if (!fRules->PreProcess(plist))
+    {
+        *fLog << err << dbginf << "PreProcessing of MDataArray failed... aborting." << endl;
+        return kFALSE;
+    }
+
+    return kTRUE;
+}
+*/
+
+Double_t MRanForest::CalcHadroness()
+{
+    TVector event;
+    *fRules >> event;
+
+    return CalcHadroness(event);
+}
 
 Double_t MRanForest::CalcHadroness(const TVector &event)
@@ -117,6 +266,5 @@
     while ((tree=(MRanTree*)Next()))
     {
-        fTreeHad[ntree]=tree->TreeHad(event);
-        hadroness+=fTreeHad[ntree];
+        hadroness+=(fTreeHad[ntree]=tree->TreeHad(event));
         ntree++;
     }
@@ -126,69 +274,96 @@
 Bool_t MRanForest::AddTree(MRanTree *rantree=NULL)
 {
-    if (rantree)
-        fRanTree=rantree;
-    if (!fRanTree)
+    fRanTree = rantree ? rantree:fRanTree;
+
+    if (!fRanTree) return kFALSE;
+
+    MRanTree *newtree=new MRanTree(*fRanTree);
+    fForest->Add(newtree);
+
+    return kTRUE;
+}
+
+Bool_t MRanForest::SetupGrow(MHMatrix *mat,MParList *plist)
+{
+    //-------------------------------------------------------------------
+    // access matrix, copy last column (target) preliminarily
+    // into fHadTrue
+    TMatrix mat_tmp = mat->GetM();
+    int dim         = mat_tmp.GetNcols();
+    int numdata     = mat_tmp.GetNrows();
+
+    fMatrix=new TMatrix(mat_tmp);
+
+    fHadTrue.Set(numdata);
+    fHadTrue.Reset(0);
+
+    for (Int_t j=0;j<numdata;j++)
+        fHadTrue[j] = (*fMatrix)(j,dim-1);
+
+    // remove last col
+    fMatrix->ResizeTo(numdata,dim-1);
+    dim=fMatrix->GetNcols();
+
+    //-------------------------------------------------------------------
+    // setup labels for classification/regression
+    fClass.Set(numdata);
+    fClass.Reset(0);
+
+    if(fClassify) PrepareClasses();
+
+    //-------------------------------------------------------------------
+    // allocating and initializing arrays
+    fHadEst.Set(numdata);       fHadEst.Reset(0);
+    fNTimesOutBag.Set(numdata); fNTimesOutBag.Reset(0);
+    fDataSort.Set(dim*numdata); fDataSort.Reset(0);
+    fDataRang.Set(dim*numdata); fDataRang.Reset(0);
+
+    if(fWeight.GetSize()!=numdata)
+    {
+        fWeight.Set(numdata);
+        fWeight.Reset(1.);
+        *fLog << inf <<"Setting weights to 1 (no weighting)"<< endl;
+    }
+
+    //-------------------------------------------------------------------
+    // setup rules to be used for classification/regression
+    MDataArray *allrules=(MDataArray*)mat->GetColumns();
+    if(allrules==NULL)
+    {
+        *fLog << err <<"Rules of matrix == null, exiting"<< endl;
         return kFALSE;
-
-    fForest->Add((MRanTree*)fRanTree->Clone());
-
-    return kTRUE;
-}
-
-Int_t MRanForest::GetNumData() const
-{
-    return fHadrons && fGammas ? fHadrons->GetM().GetNrows()+fGammas->GetM().GetNrows() : 0;
-}
-
-Bool_t MRanForest::SetupGrow(MHMatrix *mhad,MHMatrix *mgam)
-{
-    // pointer to training data
-    fHadrons=mhad;
-    fGammas=mgam;
-
-    // determine data entries and dimension of Hillas-parameter space
-    //fNumHad=fHadrons->GetM().GetNrows();
-    //fNumGam=fGammas->GetM().GetNrows();
-
-    const Int_t dim = GetNumDim();
-
-    if (dim!=fGammas->GetM().GetNcols())
-        return kFALSE;
-
-    const Int_t numdata = GetNumData();
-
-    // allocating and initializing arrays
-    fHadTrue.Set(numdata);
-    fHadTrue.Reset();
-    fHadEst.Set(numdata);
-
-    fPrior.Set(2);
-    fClassPop.Set(2);
-    fWeight.Set(numdata);
-    fNTimesOutBag.Set(numdata);
-    fNTimesOutBag.Reset();
-
-    fDataSort.Set(dim*numdata);
-    fDataRang.Set(dim*numdata);
-
-    // calculating class populations (= no. of gammas and hadrons)
-    fClassPop.Reset();
-    for(Int_t n=0;n<numdata;n++)
-        fClassPop[fHadTrue[n]]++;
-
-    // setting weights taking into account priors
-    if (!fUsePriors)
-        fWeight.Reset(1.);
-    else
-    {
-        for(Int_t j=0;j<2;j++)
-            fPrior[j] *= (fClassPop[j]>=1) ? (Float_t)numdata/fClassPop[j]:0;
-
-        for(Int_t n=0;n<numdata;n++)
-            fWeight[n]=fPrior[fHadTrue[n]];
-    }
-
-    // create fDataSort
-    CreateDataSort();
+    }
+
+    fRules=new MDataArray(); fRules->Reset();
+    TString target_rule;
+
+    for(Int_t i=0;i<dim+1;i++)
+    {
+        MData &data=(*allrules)[i];
+        if(i<dim)
+            fRules->AddEntry(data.GetRule());
+        else
+            target_rule=data.GetRule();
+    }
+
+    *fLog << inf <<endl;
+    *fLog << inf <<"Setting up RF for training on target:"<<endl<<" "<<target_rule.Data()<<endl;
+    *fLog << inf <<"Following rules are used as input to RF:"<<endl;
+
+    for(Int_t i=0;i<dim;i++)
+    {
+        MData &data=(*fRules)[i];
+        *fLog<<inf<<" "<<i<<") "<<data.GetRule()<<endl<<flush;
+    }
+
+    *fLog << inf <<endl;
+
+    //-------------------------------------------------------------------
+    // prepare (sort) data for fast optimization algorithm
+    if(!CreateDataSort()) return kFALSE;
+
+    //-------------------------------------------------------------------
+    // access and init tree container
+    fRanTree = (MRanTree*)plist->FindCreateObj("MRanTree");
 
     if(!fRanTree)
@@ -197,19 +372,29 @@
         return kFALSE;
     }
-    fRanTree->SetRules(fGammas->GetColumns());
+
+    fRanTree->SetClassify(fClassify);
+    fRanTree->SetNdSize(fNdSize);
+
+    if(fNumTry==0)
+    {
+        double ddim = double(dim);
+
+        fNumTry=int(sqrt(ddim)+0.5);
+        *fLog<<inf<<endl;
+        *fLog<<inf<<"Set no. of trials to the recommended value of round("<<sqrt(ddim)<<") = ";
+        *fLog<<inf<<fNumTry<<endl;
+
+    }
+    fRanTree->SetNumTry(fNumTry);
+
+    *fLog<<inf<<endl;
+    *fLog<<inf<<"Following settings for the tree growing are used:"<<endl;
+    *fLog<<inf<<" Number of Trees : "<<fNumTrees<<endl;
+    *fLog<<inf<<" Number of Trials: "<<fNumTry<<endl;
+    *fLog<<inf<<" Final Node size : "<<fNdSize<<endl;
+
     fTreeNo=0;
 
     return kTRUE;
-}
-
-void MRanForest::InitHadEst(Int_t from, Int_t to, const TMatrix &m, TArrayI &jinbag)
-{
-    for (Int_t ievt=from;ievt<to;ievt++)
-    {
-        if (jinbag[ievt]>0)
-            continue;
-        fHadEst[ievt] += fRanTree->TreeHad(m, ievt-from);
-        fNTimesOutBag[ievt]++;
-    }
 }
 
@@ -224,34 +409,58 @@
     fTreeNo++;
 
+    //-------------------------------------------------------------------
     // initialize running output
+
+    float minfloat=fHadTrue[TMath::LocMin(fHadTrue.GetSize(),fHadTrue.GetArray())];
+    Bool_t calcResolution=(minfloat>0.001);
+
     if (fTreeNo==1)
     {
         *fLog << inf << endl;
-        *fLog << underline; // << "1st col        2nd col" << endl;
-        *fLog << "no. of tree    error in % (calulated using oob-data -> overestim. of error)" << endl;
+        *fLog << underline;
+
+        if(calcResolution)
+            *fLog << "no. of tree    no. of nodes    resolution in % (from oob-data -> overest. of error)" << endl;
+        else
+            *fLog << "no. of tree    no. of nodes    rms in % (from oob-data -> overest. of error)" << endl;
+                     //        12345678901234567890123456789012345678901234567890
     }
 
     const Int_t numdata = GetNumData();
-
+    const Int_t nclass  = GetNclass();
+
+    //-------------------------------------------------------------------
     // bootstrap aggregating (bagging) -> sampling with replacement:
-    //
-    // The integer k is randomly (uniformly) chosen from the set
-    // {0,1,...,fNumData-1}, which is the set of the index numbers of
-    // all events in the training sample
-    TArrayF classpopw(2);
+
+    TArrayF classpopw(nclass);
     TArrayI jinbag(numdata); // Initialization includes filling with 0
     TArrayF winbag(numdata); // Initialization includes filling with 0
 
+    float square=0; float mean=0;
+
     for (Int_t n=0; n<numdata; n++)
     {
+        // The integer k is randomly (uniformly) chosen from the set
+        // {0,1,...,numdata-1}, which is the set of the index numbers of
+        // all events in the training sample
+  
         const Int_t k = Int_t(gRandom->Rndm()*numdata);
 
-        classpopw[fHadTrue[k]]+=fWeight[k];
+        if(fClassify)
+            classpopw[fClass[k]]+=fWeight[k];
+        else
+            classpopw[0]+=fWeight[k];
+
+        mean  +=fHadTrue[k]*fWeight[k];
+        square+=fHadTrue[k]*fHadTrue[k]*fWeight[k];
+  
         winbag[k]+=fWeight[k];
         jinbag[k]=1;
-    }
-
+
+    }
+
+    //-------------------------------------------------------------------
     // modifying sorted-data array for in-bag data:
-    //
+
     // In bagging procedure ca. 2/3 of all elements in the original
     // training sample are used to build the in-bag data
@@ -261,11 +470,8 @@
     ModifyDataSort(datsortinbag, ninbag, jinbag);
 
-    const TMatrix &hadrons=fHadrons->GetM();
-    const TMatrix &gammas =fGammas->GetM();
-
-    // growing single tree
-    fRanTree->GrowTree(hadrons,gammas,fHadTrue,datsortinbag,
-                       fDataRang,classpopw,jinbag,winbag);
-
+    fRanTree->GrowTree(fMatrix,fHadTrue,fClass,datsortinbag,fDataRang,classpopw,mean,square,
+                       jinbag,winbag,nclass);
+
+    //-------------------------------------------------------------------
     // error-estimates from out-of-bag data (oob data):
     //
@@ -277,38 +483,35 @@
     // determined from oob-data is underestimated, but can still be taken as upper limit.
 
-    const Int_t numhad = hadrons.GetNrows();
-    InitHadEst(0, numhad, hadrons, jinbag);
-    InitHadEst(numhad, numdata, gammas, jinbag);
-    /*
-    for (Int_t ievt=0;ievt<numhad;ievt++)
-    {
-        if (jinbag[ievt]>0)
-            continue;
-        fHadEst[ievt] += fRanTree->TreeHad(hadrons, ievt);
+    for (Int_t ievt=0;ievt<numdata;ievt++)
+    {
+        if (jinbag[ievt]>0) continue;
+
+        fHadEst[ievt] +=fRanTree->TreeHad((*fMatrix), ievt);
         fNTimesOutBag[ievt]++;
-    }
-
-    for (Int_t ievt=numhad;ievt<numdata;ievt++)
-    {
-        if (jinbag[ievt]>0)
-            continue;
-        fHadEst[ievt] += fRanTree->TreeHad(gammas, ievt-numhad);
-        fNTimesOutBag[ievt]++;
-    }
-    */
+
+    }
+
     Int_t n=0;
-    Double_t ferr=0;
+    double ferr=0;
+
     for (Int_t ievt=0;ievt<numdata;ievt++)
-        if (fNTimesOutBag[ievt]!=0)
+    {
+        if(fNTimesOutBag[ievt]!=0)
         {
-            const Double_t val = fHadEst[ievt]/fNTimesOutBag[ievt]-fHadTrue[ievt];
+            float val = fHadEst[ievt]/float(fNTimesOutBag[ievt])-fHadTrue[ievt];
+            if(calcResolution) val/=fHadTrue[ievt];
+
             ferr += val*val;
             n++;
         }
-
+    }
     ferr = TMath::Sqrt(ferr/n);
 
+    //-------------------------------------------------------------------
     // give running output
-    *fLog << inf << setw(5) << fTreeNo << Form("%15.2f", ferr*100) << endl;
+    *fLog << inf << setw(5)  << fTreeNo;
+    *fLog << inf << setw(20) << fRanTree->GetNumEndNodes();
+    *fLog << inf << Form("%20.2f", ferr*100.);
+    *fLog << inf << endl;
 
     // adding tree to forest
@@ -318,50 +521,33 @@
 }
 
-void MRanForest::CreateDataSort()
-{
-    // The values of concatenated data arrays fHadrons and fGammas (denoted in the following by fData,
-    // which does actually not exist) are sorted from lowest to highest.
+Bool_t MRanForest::CreateDataSort()
+{
+    // fDataSort(m,n) is the event number in which fMatrix(m,n) occurs.
+    // fDataRang(m,n) is the rang of fMatrix(m,n), i.e. if rang = r:
+    //   fMatrix(m,n) is the r-th highest value of all fMatrix(m,.).
     //
-    //
-    //                   fHadrons(0,0) ... fHadrons(0,nhad-1)   fGammas(0,0) ... fGammas(0,ngam-1)
-    //                        .                 .                   .                .
-    //  fData(m,n)   =        .                 .                   .                .
-    //                        .                 .                   .                .
-    //                   fHadrons(m-1,0)...fHadrons(m-1,nhad-1) fGammas(m-1,0)...fGammas(m-1,ngam-1)
-    //
-    //
-    // Then fDataSort(m,n) is the event number in which fData(m,n) occurs.
-    // fDataRang(m,n) is the rang of fData(m,n), i.e. if rang = r, fData(m,n) is the r-th highest
-    // value of all fData(m,.). There may be more then 1 event with rang r (due to bagging).
+    // There may be more than 1 event with rang r (due to bagging).
+
     const Int_t numdata = GetNumData();
+    const Int_t dim = GetNumDim();
 
     TArrayF v(numdata);
     TArrayI isort(numdata);
 
-    const TMatrix &hadrons=fHadrons->GetM();
-    const TMatrix &gammas=fGammas->GetM();
-
-    const Int_t numgam = gammas.GetNrows();
-    const Int_t numhad = hadrons.GetNrows();
-
-    for (Int_t j=0;j<numhad;j++)
-        fHadTrue[j]=1;
-
-    for (Int_t j=0;j<numgam;j++)
-        fHadTrue[j+numhad]=0;
-
-    const Int_t dim = GetNumDim();
+
     for (Int_t mvar=0;mvar<dim;mvar++)
     {
-        for(Int_t n=0;n<numhad;n++)
+
+        for(Int_t n=0;n<numdata;n++)
         {
-            v[n]=hadrons(n,mvar);
+            v[n]=(*fMatrix)(n,mvar);
             isort[n]=n;
-        }
-
-        for(Int_t n=0;n<numgam;n++)
-        {
-            v[n+numhad]=gammas(n,mvar);
-            isort[n+numhad]=n;
+
+            if(TMath::IsNaN(v[n]))
+            {
+                *fLog << err <<"Event no. "<<n<<", matrix column no. "<<mvar;
+                *fLog << err <<" has the value NaN."<<endl;
+                return kFALSE;
+            }
         }
 
@@ -371,4 +557,13 @@
         // of that v[n], which is the n-th from the lowest (assume the original
         // event numbers are 0,1,...).
+
+        // control sorting
+        for(int n=1;n<numdata;n++)
+            if(v[isort[n-1]]>v[isort[n]])
+            {
+                *fLog << err <<"Event no. "<<n<<", matrix column no. "<<mvar;
+                *fLog << err <<" not at correct sorting position."<<endl;
+                return kFALSE;
+            }
 
         for(Int_t n=0;n<numdata-1;n++)
@@ -388,4 +583,5 @@
         fDataSort[(mvar+1)*numdata-1]=isort[numdata-1];
     }
+    return kTRUE;
 }
 
Index: trunk/MagicSoft/Mars/mranforest/MRanForest.h
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRanForest.h	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRanForest.h	(revision 7396)
@@ -4,12 +4,4 @@
 #ifndef MARS_MParContainer
 #include "MParContainer.h"
-#endif
-
-#ifndef MARS_MRanTree
-#include "MRanTree.h"
-#endif
-
-#ifndef MARS_MDataArray
-#include "MDataArray.h"
 #endif
 
@@ -26,82 +18,90 @@
 #endif
 
-#ifndef ROOT_TObjArray
-#include <TObjArray.h>
-#endif
+class TMatrix;
+class TVector;
+class TObjArray;
 
-#ifndef ROOT_TRandom
-#include <TRandom.h>
-#endif
-
+class MRanTree;
+class MDataArray;
 class MHMatrix;
-class MRanTree;
-class TVector;
-class TMatrix;
+class MParList;
 
 class MRanForest : public MParContainer
 {
 private:
-    Int_t fNumTrees;
-    Int_t fTreeNo;      //!
+    Int_t fClassify;
 
-    MRanTree *fRanTree; //!
-    TObjArray *fForest;
+    Int_t fNumTrees;       // Number of trees
+    Int_t fNumTry;         // Number of tries
+    Int_t fNdSize;         // Size of node
+
+    Int_t fTreeNo;         //! Number of tree
+
+    MRanTree   *fRanTree;  //! Pointer to some tree
+    MDataArray *fRules;    //! Pointer to corresponding rules
+    TObjArray  *fForest;   //  Array containing forest
 
     // training data
-    MHMatrix *fGammas;  //!
-    MHMatrix *fHadrons; //!
+    TMatrix *fMatrix;      //!
 
     // true  and estimated hadronness
-    TArrayI fHadTrue;   //!
-    TArrayF fHadEst;    //!
+    TArrayI fClass;        //!
+    TArrayD fGrid;         //!
+    TArrayF fHadTrue;      //!
+    TArrayF fHadEst;       //!
 
     // data sorted according to parameters
-    TArrayI fDataSort;  //!
-    TArrayI fDataRang;  //!
-    TArrayI fClassPop;  //!
+    TArrayI fDataSort;     //!
+    TArrayI fDataRang;     //!
+    TArrayI fClassPop;     //!
 
     // weights
-    Bool_t  fUsePriors; //!
-    TArrayF fPrior;     //!
-    TArrayF fWeight;    //!
-    TArrayI fNTimesOutBag;//!
+    TArrayF fWeight;       //!
+    TArrayI fNTimesOutBag; //!
 
     // estimates for classification error of growing forest
-    TArrayD fTreeHad;   //
+    TArrayD fTreeHad;      // Hadronness values
 
-    Double_t fUserVal;
-
-    void InitHadEst(Int_t from, Int_t to, const TMatrix &m, TArrayI &jinbag);
+    Double_t fUserVal;     // A user value describing this tree (eg E-mc)
 
 protected:
     // create and modify (->due to bagging) fDataSort
-    void CreateDataSort();
+    Bool_t CreateDataSort();
     void ModifyDataSort(TArrayI &datsortinbag, Int_t ninbag, const TArrayI &jinbag);
 
 public:
     MRanForest(const char *name=NULL, const char *title=NULL);
+    MRanForest(const MRanForest &rf);
+
     ~MRanForest();
 
-    // initialize forest
-    void SetPriors(Float_t prior_had, Float_t prior_gam);
+    void SetGrid(const TArrayD &grid);
+    void SetWeights(const TArrayF &weights);
     void SetNumTrees(Int_t n);
 
-    // tree growing
-    //void   SetupForest();
-    Bool_t SetupGrow(MHMatrix *mhad,MHMatrix *mgam);
+    void SetNumTry(Int_t n);
+    void SetNdSize(Int_t n);
+
+    void SetClassify(Int_t n){ fClassify=n; }
+    void PrepareClasses();
+
+        // tree growing
+    Bool_t SetupGrow(MHMatrix *mat,MParList *plist);
     Bool_t GrowForest();
-    void SetCurTree(MRanTree *rantree) { fRanTree=rantree; }
+    void   SetCurTree(MRanTree *rantree) { fRanTree=rantree; }
     Bool_t AddTree(MRanTree *rantree);
-    void SetUserVal(Double_t d) { fUserVal = d; }
+    void   SetUserVal(Double_t d) { fUserVal = d; }
 
     // getter methods
     TObjArray  *GetForest()      { return fForest; }
     MRanTree   *GetCurTree()     { return fRanTree; }
-    MRanTree   *GetTree(Int_t i) { return (MRanTree*)(fForest->At(i)); }
-    MDataArray *GetRules() { return ((MRanTree*)(fForest->At(0)))->GetRules(); }
+    MRanTree   *GetTree(Int_t i);
+    MDataArray *GetRules()       { return fRules; }
+
 
     Int_t      GetNumTrees() const { return fNumTrees; }
     Int_t      GetNumData()  const;
     Int_t      GetNumDim()   const;
+    Int_t      GetNclass()   const;
     Double_t   GetTreeHad(Int_t i) const { return fTreeHad.At(i); }
     Double_t   GetUserVal() const { return fUserVal; }
@@ -109,8 +109,9 @@
     // use forest to calculate hadronness of event
     Double_t CalcHadroness(const TVector &event);
+    Double_t CalcHadroness();
 
     Bool_t AsciiWrite(ostream &out) const;
 
-    ClassDef(MRanForest, 2) // Storage container for tree structure
+    ClassDef(MRanForest, 1) // Storage container for tree structure
 };
 
Index: trunk/MagicSoft/Mars/mranforest/MRanForestGrow.cc
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRanForestGrow.cc	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRanForestGrow.cc	(revision 7396)
@@ -16,7 +16,7 @@
 !
 !
-!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@alwa02.physik.uni-siegen.de>
+!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@physik.hu-berlin.de>
 !
-!   Copyright: MAGIC Software Development, 2000-2003
+!   Copyright: MAGIC Software Development, 2000-2005
 !
 !
@@ -24,9 +24,9 @@
 
 /////////////////////////////////////////////////////////////////////////////
-//                                                                         //
-//  MRanForestGrow                                                         //
-//                                                                         //
-//  Grows a random forest.                                                 //
-//                                                                         //
+//
+//  MRanForestGrow
+//
+//  Grows a random forest.
+//
 /////////////////////////////////////////////////////////////////////////////
 #include "MRanForestGrow.h"
@@ -38,6 +38,4 @@
 
 #include "MParList.h"
-
-#include "MRanTree.h"
 #include "MRanForest.h"
 
@@ -46,62 +44,25 @@
 using namespace std;
 
-static const TString gsDefName  = "MRead";
-static const TString gsDefTitle = "Tree Classification Loop 1/2";
+const TString MRanForestGrow::gsDefName  = "MRead";
+const TString MRanForestGrow::gsDefTitle = "Task to train a random forest";
 
-// --------------------------------------------------------------------------
-//
-// Setup histograms and the number of distances which are used for
-// avaraging in CalcDist
-//
 MRanForestGrow::MRanForestGrow(const char *name, const char *title)
 {
-    //
     //   set the name and title of this object
-    //
+
     fName  = name  ? name  : gsDefName.Data();
     fTitle = title ? title : gsDefTitle.Data();
 
-    SetNumTrees();
-    SetNumTry();
-    SetNdSize();
+    //     SetNumTrees();
+    //     SetNumTry();
+    //     SetNdSize();
 }
 
-// --------------------------------------------------------------------------
-//
-// Needs:
-//  - MatrixGammas  [MHMatrix]
-//  - MatrixHadrons {MHMatrix]
-//  - MHadroness
-//  - all data containers used to build the matrixes
-//
-// The matrix object can be filles using MFillH. And must be of the same
-// number of columns (with the same meaning).
-//
 Int_t MRanForestGrow::PreProcess(MParList *plist)
 {
-    fMGammas = (MHMatrix*)plist->FindObject("MatrixGammas", "MHMatrix");
-    if (!fMGammas)
+    fMatrix = (MHMatrix*)plist->FindObject("MatrixTrain", "MHMatrix");
+    if (!fMatrix)
     {
-        *fLog << err << dbginf << "MatrixGammas [MHMatrix] not found... aborting." << endl;
-        return kFALSE;
-    }
-
-    fMHadrons = (MHMatrix*)plist->FindObject("MatrixHadrons", "MHMatrix");
-    if (!fMHadrons)
-    {
-        *fLog << err << dbginf << "MatrixHadrons [MHMatrix] not found... aborting." << endl;
-        return kFALSE;
-    }
-
-    if (fMGammas->GetM().GetNcols() != fMHadrons->GetM().GetNcols())
-    {
-        *fLog << err << dbginf << "Error matrices have different numbers of columns... aborting." << endl;
-        return kFALSE;
-    }
-
-    fRanTree = (MRanTree*)plist->FindCreateObj("MRanTree");
-    if (!fRanTree)
-    {
-        *fLog << err << dbginf << "MRanTree not found... aborting." << endl;
+        *fLog << err << dbginf << "MatrixTrain [MHMatrix] not found... aborting." << endl;
         return kFALSE;
     }
@@ -114,27 +75,18 @@
     }
 
-    fRanTree->SetNumTry(fNumTry);
-    fRanTree->SetNdSize(fNdSize);
-    fRanForest->SetCurTree(fRanTree);
-    fRanForest->SetNumTrees(fNumTrees);
+    //     fRanForest->SetNumTry(fNumTry);
+    //     fRanForest->SetNdSize(fNdSize);
+    //     fRanForest->SetNumTrees(fNumTrees);
 
-    return fRanForest->SetupGrow(fMHadrons,fMGammas);
+    return fRanForest->SetupGrow(fMatrix,plist);
 }
 
-// --------------------------------------------------------------------------
-//
-//
 Int_t MRanForestGrow::Process()
 {
-    const Bool_t not_last=fRanForest->GrowForest();
-
-    fRanTree->SetReadyToSave();
-
-    return not_last;
+    return fRanForest->GrowForest();
 }
 
 Int_t MRanForestGrow::PostProcess()
 {
-    fRanTree->SetReadyToSave();
     fRanForest->SetReadyToSave();
 
Index: trunk/MagicSoft/Mars/mranforest/MRanForestGrow.h
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRanForestGrow.h	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRanForestGrow.h	(revision 7396)
@@ -9,17 +9,17 @@
 class MParList;
 class MRanForest;
-class MRanTree;
 
 class MRanForestGrow : public MRead
 {
 private:
-    MRanTree   *fRanTree;
+    static const TString gsDefName;
+    static const TString gsDefTitle;
+
+    //     Int_t fNumTrees;
+    //     Int_t fNumTry;
+    //     Int_t fNdSize;
+
     MRanForest *fRanForest;
-    MHMatrix   *fMGammas;   //! Gammas describing matrix
-    MHMatrix   *fMHadrons;  //! Hadrons (non gammas) describing matrix
-
-    Int_t fNumTrees;
-    Int_t fNumTry;
-    Int_t fNdSize;
+    MHMatrix   *fMatrix;   //! matrix with events
 
     Int_t PreProcess(MParList *pList);
@@ -34,7 +34,7 @@
     MRanForestGrow(const char *name=NULL, const char *title=NULL);
 
-    void SetNumTrees(Int_t n=-1) { fNumTrees=n>0?n:100; }
-    void SetNumTry(Int_t   n=-1) { fNumTry  =n>0?n:  3; }
-    void SetNdSize(Int_t   n=-1) { fNdSize  =n>0?n:  1; }
+    //     void SetNumTrees(Int_t n=-1) { fNumTrees=n>0?n:100; }
+    //     void SetNumTry(Int_t   n=-1) { fNumTry  =n>0?n:  3; }
+    //     void SetNdSize(Int_t   n=-1) { fNdSize  =n>0?n:  1; }
 
     ClassDef(MRanForestGrow, 0) // Task to grow a random forest
Index: trunk/MagicSoft/Mars/mranforest/MRanTree.cc
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRanTree.cc	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRanTree.cc	(revision 7396)
@@ -16,7 +16,7 @@
 !
 !
-!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@alwa02.physik.uni-siegen.de>
+!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@physik.hu-berlin.de>
 !
-!   Copyright: MAGIC Software Development, 2000-2003
+!   Copyright: MAGIC Software Development, 2000-2005
 !
 !
@@ -38,6 +38,4 @@
 #include <TRandom.h>
 
-#include "MDataArray.h"
-
 #include "MLog.h"
 #include "MLogManip.h"
@@ -47,13 +45,35 @@
 using namespace std;
 
+
 // --------------------------------------------------------------------------
-//
 // Default constructor.
 //
-MRanTree::MRanTree(const char *name, const char *title):fNdSize(0), fNumTry(3), fData(NULL)
+MRanTree::MRanTree(const char *name, const char *title):fClassify(1),fNdSize(0), fNumTry(3)
 {
 
     fName  = name  ? name  : "MRanTree";
     fTitle = title ? title : "Storage container for structure of a single tree";
+}
+
+// --------------------------------------------------------------------------
+// Copy constructor
+//
+MRanTree::MRanTree(const MRanTree &tree)
+{
+    fName  = tree.fName;
+    fTitle = tree.fTitle;
+
+    fClassify = tree.fClassify;
+    fNdSize   = tree.fNdSize;
+    fNumTry   = tree.fNumTry;
+
+    fNumNodes    = tree.fNumNodes;
+    fNumEndNodes = tree.fNumEndNodes;
+
+    fBestVar   = tree.fBestVar;
+    fTreeMap1  = tree.fTreeMap1;
+    fTreeMap2  = tree.fTreeMap2;
+    fBestSplit = tree.fBestSplit;
+    fGiniDec   = tree.fGiniDec;
 }
 
@@ -75,12 +95,12 @@
 }
 
-void MRanTree::GrowTree(const TMatrix &mhad, const TMatrix &mgam,
-                        const TArrayI &hadtrue, TArrayI &datasort,
-                        const TArrayI &datarang, TArrayF &tclasspop, TArrayI &jinbag,
-                        const TArrayF &winbag)
+void MRanTree::GrowTree(TMatrix *mat, const TArrayF &hadtrue, const TArrayI &idclass,
+                        TArrayI &datasort, const TArrayI &datarang, TArrayF &tclasspop,
+                        float &mean, float &square, TArrayI &jinbag, const TArrayF &winbag,
+                        const int nclass)
 {
     // arrays have to be initialized with generous size, so number of total nodes (nrnodes)
     // is estimated for worst case
-    const Int_t numdim =mhad.GetNcols();
+    const Int_t numdim =mat->GetNcols();
     const Int_t numdata=winbag.GetSize();
     const Int_t nrnodes=2*numdata+1;
@@ -88,29 +108,26 @@
     // number of events in bootstrap sample
     Int_t ninbag=0;
-    for (Int_t n=0;n<numdata;n++)
-        if(jinbag[n]==1) ninbag++;
-
-    TArrayI bestsplit(nrnodes);
-    TArrayI bestsplitnext(nrnodes);
-
-    fBestVar.Set(nrnodes);
-    fTreeMap1.Set(nrnodes);
-    fTreeMap2.Set(nrnodes);
-    fBestSplit.Set(nrnodes);
-
-    fTreeMap1.Reset();
-    fTreeMap2.Reset();
-    fBestSplit.Reset();
-
-    fGiniDec.Set(numdim);
-    fGiniDec.Reset();
+    for (Int_t n=0;n<numdata;n++) if(jinbag[n]==1) ninbag++;
+
+    TArrayI bestsplit(nrnodes);      bestsplit.Reset(0);
+    TArrayI bestsplitnext(nrnodes);  bestsplitnext.Reset(0);
+
+    fBestVar.Set(nrnodes);    fBestVar.Reset(0);
+    fTreeMap1.Set(nrnodes);   fTreeMap1.Reset(0);
+    fTreeMap2.Set(nrnodes);   fTreeMap2.Reset(0);
+    fBestSplit.Set(nrnodes);  fBestSplit.Reset(0);
+    fGiniDec.Set(numdim);     fGiniDec.Reset(0);
+
+
+    if(fClassify)
+        FindBestSplit=&MRanTree::FindBestSplitGini;
+    else
+        FindBestSplit=&MRanTree::FindBestSplitSigma;
 
     // tree growing
-    BuildTree(datasort,datarang,hadtrue,bestsplit,
-              bestsplitnext,tclasspop,winbag,ninbag);
+    BuildTree(datasort,datarang,hadtrue,idclass,bestsplit, bestsplitnext,
+              tclasspop,mean,square,winbag,ninbag,nclass);
 
     // post processing, determine cut (or split) values fBestSplit
-    Int_t nhad=mhad.GetNrows();
-
     for(Int_t k=0; k<nrnodes; k++)
     {
@@ -122,7 +139,7 @@
         const Int_t &msp =fBestVar[k];
 
-        fBestSplit[k]  = bsp<nhad  ? mhad(bsp, msp):mgam(bsp-nhad, msp);
-        fBestSplit[k] += bspn<nhad ? mhad(bspn,msp):mgam(bspn-nhad,msp);
-        fBestSplit[k] /= 2;
+        fBestSplit[k]  = (*mat)(bsp, msp);
+        fBestSplit[k] += (*mat)(bspn,msp);
+        fBestSplit[k] /= 2.;
     }
 
@@ -134,8 +151,10 @@
 }
 
-Int_t MRanTree::FindBestSplit(const TArrayI &datasort,const TArrayI &datarang,
-                              const TArrayI &hadtrue,Int_t ndstart,Int_t ndend,TArrayF &tclasspop,
-                              Int_t &msplit,Float_t &decsplit,Int_t &nbest,
-                              const TArrayF &winbag)
+int MRanTree::FindBestSplitGini(const TArrayI &datasort,const TArrayI &datarang,
+                                const TArrayF &hadtrue,const TArrayI &idclass,
+                                Int_t ndstart,Int_t ndend, TArrayF &tclasspop,
+                                float &mean, float &square, Int_t &msplit,
+                                Float_t &decsplit,Int_t &nbest, const TArrayF &winbag,
+                                const int nclass)
 {
     const Int_t nrnodes = fBestSplit.GetSize();
@@ -143,9 +162,8 @@
     const Int_t mdim = fGiniDec.GetSize();
 
-    // weighted class populations after split
-    TArrayF wc(2); 
-    TArrayF wr(2); // right node
-
-    // For the best split, msplit is the index of the variable (e.g Hillas par., zenith angle ,...)
+    TArrayF wr(nclass); wr.Reset(0);// right node
+
+    // For the best split, msplit is the index of the variable (e.g Hillas par.,
+    // zenith angle ,...)
     // split on. decsplit is the decreae in impurity measured by Gini-index.
     // nsplit is the case number of value of msplit split on,
@@ -158,5 +176,6 @@
     Double_t pno=0;
     Double_t pdo=0;
-    for (Int_t j=0; j<2; j++)
+
+    for (Int_t j=0; j<nclass; j++)
     {
         pno+=tclasspop[j]*tclasspop[j];
@@ -165,5 +184,4 @@
 
     const Double_t crit0=pno/pdo;
-    Int_t jstat=0;
 
     // start main loop through variables to find best split,
@@ -184,32 +202,184 @@
         Double_t rld=0;
 
-        TArrayF wl(2); // left node
+        TArrayF wl(nclass); wl.Reset(0.);// left node //nclass
         wr = tclasspop;
 
         Double_t critvar=-1.0e20;
-
+        for(Int_t nsp=ndstart;nsp<=ndend-1;nsp++)
+        {
+            const Int_t  &nc = datasort[mn+nsp];
+            const Int_t   &k = idclass[nc];
+            const Float_t &u = winbag[nc];
+
+            // do classification, Gini index as split rule
+            rln+=u*(2*wl[k]+u);
+            rrn+=u*(-2*wr[k]+u);
+
+            rld+=u;
+            rrd-=u;
+
+            wl[k]+=u;
+            wr[k]-=u;
+
+            if (datarang[mn+nc]>=datarang[mn+datasort[mn+nsp+1]])
+                continue;
+
+            if (TMath::Min(rrd,rld)<=1.0e-5)
+                continue;
+
+            const Double_t crit=(rln/rld)+(rrn/rrd);
+
+
+            if (crit<=critvar) continue;
+
+            nbestvar=nsp;
+            critvar=crit;
+        }
+
+        if (critvar<=critmax) continue;
+
+        msplit=mvar;
+        nbest=nbestvar;
+        critmax=critvar;
+    }
+
+    decsplit=critmax-crit0;
+
+    return critmax<-1.0e10 ? 1 : 0;
+}
+
+int MRanTree::FindBestSplitSigma(const TArrayI &datasort,const TArrayI &datarang,
+                                 const TArrayF &hadtrue, const TArrayI &idclass,
+                                 Int_t ndstart,Int_t ndend, TArrayF &tclasspop,
+                                 float &mean, float &square, Int_t &msplit,
+                                 Float_t &decsplit,Int_t &nbest, const TArrayF &winbag,
+                                 const int nclass)
+{
+    const Int_t nrnodes = fBestSplit.GetSize();
+    const Int_t numdata = (nrnodes-1)/2;
+    const Int_t mdim = fGiniDec.GetSize();
+
+    float wr=0;// right node
+
+    // For the best split, msplit is the index of the variable (e.g Hillas par., zenith angle ,...)
+    // split on. decsplit is the decrease in impurity measured by Gini-index.
+    // nsplit is the case number of value of msplit split on,
+    // and nsplitnext is the case number of the next larger value of msplit.
+
+    Int_t nbestvar=0;
+
+    // compute initial values of numerator and denominator of split-index,
+
+    // resolution
+    //Double_t pno=-(tclasspop[0]*square-mean*mean)*tclasspop[0];
+    //Double_t pdo= (tclasspop[0]-1.)*mean*mean;
+
+    // n*resolution
+    //Double_t pno=-(tclasspop[0]*square-mean*mean)*tclasspop[0];
+    //Double_t pdo= mean*mean;
+
+    // variance
+    //Double_t pno=-(square-mean*mean/tclasspop[0]);
+    //Double_t pdo= (tclasspop[0]-1.);
+
+    // n*variance
+    Double_t pno= (square-mean*mean/tclasspop[0]);
+    Double_t pdo= 1.;
+
+    // 1./(n*variance)
+    //Double_t pno= 1.;
+    //Double_t pdo= (square-mean*mean/tclasspop[0]);
+
+    const Double_t crit0=pno/pdo;
+
+    // start main loop through variables to find best split,
+
+    Double_t critmin=1.0e40;
+
+    // random split selection, number of trials = fNumTry
+    for (Int_t mt=0; mt<fNumTry; mt++)
+    {
+        const Int_t mvar=Int_t(gRandom->Rndm()*mdim);
+        const Int_t mn  = mvar*numdata;
+
+        Double_t rrn=0, rrd=0, rln=0, rld=0;
+        Double_t esumr=0, esuml=0, e2sumr=0,e2suml=0;
+
+        esumr =mean;
+        e2sumr=square;
+        esuml =0;
+        e2suml=0;
+
+        float wl=0.;// left node
+        wr = tclasspop[0];
+
+        Double_t critvar=critmin;
         for(Int_t nsp=ndstart;nsp<=ndend-1;nsp++)
         {
             const Int_t &nc=datasort[mn+nsp];
-            const Int_t &k=hadtrue[nc];
-
+            const Float_t &f=hadtrue[nc];
             const Float_t &u=winbag[nc];
 
-            rln+=u*(2*wl[k]+u);
-            rrn+=u*(-2*wr[k]+u);
-            rld+=u;
-            rrd-=u;
-
-            wl[k]+=u;
-            wr[k]-=u;
+            e2sumr-=u*f*f;
+            esumr -=u*f;
+            wr    -=u;
+
+            //-------------------------------------------
+            // resolution
+            //rrn=(wr*e2sumr-esumr*esumr)*wr;
+            //rrd=(wr-1.)*esumr*esumr;
+
+            // resolution times n
+            //rrn=(wr*e2sumr-esumr*esumr)*wr;
+            //rrd=esumr*esumr;
+
+            // sigma
+            //rrn=(e2sumr-esumr*esumr/wr);
+            //rrd=(wr-1.);
+
+            // sigma times n
+            rrn=(e2sumr-esumr*esumr/wr);
+            rrd=1.;
+
+            // 1./(n*variance)
+            //rrn=1.;
+            //rrd=(e2sumr-esumr*esumr/wr);
+            //-------------------------------------------
+
+            e2suml+=u*f*f;
+            esuml +=u*f;
+            wl    +=u;
+
+            //-------------------------------------------
+            // resolution
+            //rln=(wl*e2suml-esuml*esuml)*wl;
+            //rld=(wl-1.)*esuml*esuml;
+
+            // resolution times n
+            //rln=(wl*e2suml-esuml*esuml)*wl;
+            //rld=esuml*esuml;
+
+            // sigma
+            //rln=(e2suml-esuml*esuml/wl);
+            //rld=(wl-1.);
+
+            // sigma times n
+            rln=(e2suml-esuml*esuml/wl);
+            rld=1.;
+
+            // 1./(n*variance)
+            //rln=1.;
+            //rld=(e2suml-esuml*esuml/wl);
+            //-------------------------------------------
 
             if (datarang[mn+nc]>=datarang[mn+datasort[mn+nsp+1]])
                 continue;
+
             if (TMath::Min(rrd,rld)<=1.0e-5)
                 continue;
 
             const Double_t crit=(rln/rld)+(rrn/rrd);
-            if (crit<=critvar)
-                continue;
+
+            if (crit>=critvar) continue;
 
             nbestvar=nsp;
@@ -217,19 +387,19 @@
         }
 
-        if (critvar<=critmax)
-            continue;
+        if (critvar>=critmin) continue;
 
         msplit=mvar;
         nbest=nbestvar;
-        critmax=critvar;
-    }
-
-    decsplit=critmax-crit0;
-
-    return critmax<-1.0e10 ? 1 : jstat;
-}
-
-void MRanTree::MoveData(TArrayI &datasort,Int_t ndstart,
-                        Int_t ndend,TArrayI &idmove,TArrayI &ncase,Int_t msplit,
+        critmin=critvar;
+    }
+
+    decsplit=crit0-critmin;
+
+    //return critmin>1.0e20 ? 1 : 0;
+    return decsplit<0 ? 1 : 0;
+}
+
+void MRanTree::MoveData(TArrayI &datasort,Int_t ndstart, Int_t ndend,
+                        TArrayI &idmove,TArrayI &ncase,Int_t msplit,
                         Int_t nbest,Int_t &ndendl)
 {
@@ -240,8 +410,7 @@
     const Int_t mdim    = fGiniDec.GetSize();
 
-    TArrayI tdatasort(numdata);
+    TArrayI tdatasort(numdata); tdatasort.Reset(0);
 
     // compute idmove = indicator of case nos. going left
-
     for (Int_t nsp=ndstart;nsp<=ndend;nsp++)
     {
@@ -252,5 +421,4 @@
 
     // shift case. nos. right and left for numerical variables.
-
     for(Int_t msh=0;msh<mdim;msh++)
     {
@@ -280,8 +448,8 @@
 }
 
-void MRanTree::BuildTree(TArrayI &datasort,const TArrayI &datarang,
-                         const TArrayI &hadtrue, TArrayI &bestsplit,
-                         TArrayI &bestsplitnext, TArrayF &tclasspop,
-                         const TArrayF &winbag, Int_t ninbag)
+void MRanTree::BuildTree(TArrayI &datasort,const TArrayI &datarang, const TArrayF &hadtrue,
+                         const TArrayI &idclass, TArrayI &bestsplit, TArrayI &bestsplitnext,
+                         TArrayF &tclasspop, float &tmean, float &tsquare, const TArrayF &winbag,
+                         Int_t ninbag, const int nclass)
 {
     // Buildtree consists of repeated calls to two void functions, FindBestSplit and MoveData.
@@ -302,17 +470,24 @@
     const Int_t numdata = (nrnodes-1)/2;
 
-    TArrayI nodepop(nrnodes);
-    TArrayI nodestart(nrnodes);
-    TArrayI parent(nrnodes);
-
-    TArrayI ncase(numdata);
-    TArrayI idmove(numdata);
-    TArrayI iv(mdim);
-
-    TArrayF classpop(nrnodes*2);
-    TArrayI nodestatus(nrnodes);
-
-    for (Int_t j=0;j<2;j++)
+    TArrayI nodepop(nrnodes);     nodepop.Reset(0);
+    TArrayI nodestart(nrnodes);   nodestart.Reset(0);
+    TArrayI parent(nrnodes);      parent.Reset(0);
+
+    TArrayI ncase(numdata);       ncase.Reset(0);
+    TArrayI idmove(numdata);      idmove.Reset(0);
+    TArrayI iv(mdim);             iv.Reset(0);
+
+    TArrayF classpop(nrnodes*nclass);  classpop.Reset(0.);//nclass
+    TArrayI nodestatus(nrnodes);       nodestatus.Reset(0);
+
+    for (Int_t j=0;j<nclass;j++)
         classpop[j*nrnodes+0]=tclasspop[j];
+
+    TArrayF mean(nrnodes);   mean.Reset(0.);
+    TArrayF square(nrnodes); square.Reset(0.);
+
+    mean[0]=tmean;
+    square[0]=tsquare;
+
 
     Int_t ncur=0;
@@ -330,14 +505,17 @@
           const Int_t ndstart=nodestart[kbuild];
           const Int_t ndend=ndstart+nodepop[kbuild]-1;
-          for (Int_t j=0;j<2;j++)
+
+          for (Int_t j=0;j<nclass;j++)
               tclasspop[j]=classpop[j*nrnodes+kbuild];
+
+          tmean=mean[kbuild];
+          tsquare=square[kbuild];
 
           Int_t msplit, nbest;
           Float_t decsplit=0;
-          const Int_t jstat=FindBestSplit(datasort,datarang,hadtrue,
-                                          ndstart,ndend,tclasspop,msplit,
-                                          decsplit,nbest,winbag);
-
-          if (jstat==1)
+
+          if ((*this.*FindBestSplit)(datasort,datarang,hadtrue,idclass,ndstart,
+                                     ndend, tclasspop,tmean, tsquare,msplit,decsplit,
+                                     nbest,winbag,nclass))
           {
               nodestatus[kbuild]=-1;
@@ -356,5 +534,4 @@
 
           // leftnode no.= ncur+1, rightnode no. = ncur+2.
-
           nodepop[ncur+1]=ndendl-ndstart+1;
           nodepop[ncur+2]=ndend-ndendl;
@@ -363,9 +540,12 @@
 
           // find class populations in both nodes
-
           for (Int_t n=ndstart;n<=ndendl;n++)
           {
               const Int_t &nc=ncase[n];
-              const Int_t &j=hadtrue[nc];
+              const int j=idclass[nc];
+                   
+              mean[ncur+1]+=hadtrue[nc]*winbag[nc];
+              square[ncur+1]+=hadtrue[nc]*hadtrue[nc]*winbag[nc];
+
               classpop[j*nrnodes+ncur+1]+=winbag[nc];
           }
@@ -374,5 +554,9 @@
           {
               const Int_t &nc=ncase[n];
-              const Int_t &j=hadtrue[nc];
+              const int j=idclass[nc];
+
+              mean[ncur+2]  +=hadtrue[nc]*winbag[nc];
+              square[ncur+2]+=hadtrue[nc]*hadtrue[nc]*winbag[nc];
+
               classpop[j*nrnodes+ncur+2]+=winbag[nc];
           }
@@ -385,7 +569,8 @@
           if (nodepop[ncur+2]<=fNdSize) nodestatus[ncur+2]=-1;
 
+
           Double_t popt1=0;
           Double_t popt2=0;
-          for (Int_t j=0;j<2;j++)
+          for (Int_t j=0;j<nclass;j++)
           {
               popt1+=classpop[j*nrnodes+ncur+1];
@@ -393,8 +578,12 @@
           }
 
-          for (Int_t j=0;j<2;j++)
+          if(fClassify)
           {
-              if (classpop[j*nrnodes+ncur+1]==popt1) nodestatus[ncur+1]=-1;
-              if (classpop[j*nrnodes+ncur+2]==popt2) nodestatus[ncur+2]=-1;
+              // check if only members of one class in node
+              for (Int_t j=0;j<nclass;j++)
+              {
+                  if (classpop[j*nrnodes+ncur+1]==popt1) nodestatus[ncur+1]=-1;
+                  if (classpop[j*nrnodes+ncur+2]==popt2) nodestatus[ncur+2]=-1;
+              }
           }
 
@@ -421,47 +610,24 @@
         {
             fNumEndNodes++;
+
             Double_t pp=0;
-            for (Int_t j=0;j<2;j++)
+            for (Int_t j=0;j<nclass;j++)
             {
                 if(classpop[j*nrnodes+kn]>pp)
                 {
                     // class + status of node kn coded into fBestVar[kn]
-                    fBestVar[kn]=j-2;
+                    fBestVar[kn]=j-nclass;
                     pp=classpop[j*nrnodes+kn];
                 }
             }
-            fBestSplit[kn] =classpop[1*nrnodes+kn];
-            fBestSplit[kn]/=(classpop[0*nrnodes+kn]+classpop[1*nrnodes+kn]);
+
+                float sum=0;
+                for(int i=0;i<nclass;i++) sum+=classpop[i*nrnodes+kn];
+
+                fBestSplit[kn]=mean[kn]/sum;
         }
 }
 
-void MRanTree::SetRules(MDataArray *rules)
-{
-    fData=rules;
-}
-
 Double_t MRanTree::TreeHad(const TVector &event)
-{
-    Int_t kt=0;
-    // to optimize on storage space node status and node class
-    // are coded into fBestVar:
-    // status of node kt = TMath::Sign(1,fBestVar[kt])
-    // class  of node kt = fBestVar[kt]+2
-    //  (class defined by larger node population, actually not used)
-    // hadronness assigned to node kt = fBestSplit[kt]
-
-    for (Int_t k=0;k<fNumNodes;k++)
-    {
-        if (fBestVar[kt]<0)
-            break;
-
-        const Int_t m=fBestVar[kt];
-        kt = event(m)<=fBestSplit[kt] ? fTreeMap1[kt] : fTreeMap2[kt];
-    }
-
-    return fBestSplit[kt];
-}
-
-Double_t MRanTree::TreeHad(const TMatrixFRow_const &event)
 {
     Int_t kt=0;
@@ -485,4 +651,26 @@
 }
 
+Double_t MRanTree::TreeHad(const TMatrixRow &event)
+{
+    Int_t kt=0;
+    // to optimize on storage space node status and node class
+    // are coded into fBestVar:
+    // status of node kt = TMath::Sign(1,fBestVar[kt])
+    // class  of node kt = fBestVar[kt]+2 (class defined by larger
+    //  node population, actually not used)
+    // hadronness assigned to node kt = fBestSplit[kt]
+
+    for (Int_t k=0;k<fNumNodes;k++)
+    {
+        if (fBestVar[kt]<0)
+            break;
+
+        const Int_t m=fBestVar[kt];
+        kt = event(m)<=fBestSplit[kt] ? fTreeMap1[kt] : fTreeMap2[kt];
+    }
+
+    return fBestSplit[kt];
+}
+
 Double_t MRanTree::TreeHad(const TMatrix &m, Int_t ievt)
 {
@@ -494,21 +682,14 @@
 }
 
-Double_t MRanTree::TreeHad()
-{
-    TVector event;
-    *fData >> event;
-
-    return TreeHad(event);
-}
-
 Bool_t MRanTree::AsciiWrite(ostream &out) const
 {
-    out.width(5);
-    out << fNumNodes << endl;
-
+    TString str;
     Int_t k;
-    for (k=0; k<fNumNodes; k++)
-    {
-        TString str=Form("%f", GetBestSplit(k));
+
+    out.width(5);out<<fNumNodes<<endl;
+
+    for (k=0;k<fNumNodes;k++)
+    {
+        str=Form("%f",GetBestSplit(k));
 
         out.width(5);  out << k;
@@ -520,6 +701,6 @@
         out.width(5);  out << GetNodeClass(k);
     }
-    out << endl;
-
-    return kTRUE;
-}
+    out<<endl;
+
+    return k==fNumNodes;
+}
Index: trunk/MagicSoft/Mars/mranforest/MRanTree.h
===================================================================
--- trunk/MagicSoft/Mars/mranforest/MRanTree.h	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/MRanTree.h	(revision 7396)
@@ -16,12 +16,11 @@
 class TMatrix;
 class TMatrixRow;
-class TMatrixFRow_const;
 class TVector;
 class TRandom;
-class MDataArray;
 
 class MRanTree : public MParContainer
 {
 private:
+    Int_t fClassify;
     Int_t fNdSize;
     Int_t fNumTry;
@@ -29,5 +28,4 @@
     Int_t fNumNodes;
     Int_t fNumEndNodes;
-    MDataArray *fData;
 
     TArrayI fBestVar;
@@ -35,12 +33,25 @@
     TArrayI fTreeMap2;
     TArrayF fBestSplit;
-
     TArrayF fGiniDec;
 
-    Int_t FindBestSplit(const TArrayI &datasort, const TArrayI &datarang,
-                        const TArrayI &hadtrue,
-                        Int_t ndstart, Int_t ndend, TArrayF &tclasspop,
-                        Int_t &msplit, Float_t &decsplit, Int_t &nbest,
-                        const TArrayF &winbag);
+    int (MRanTree::*FindBestSplit)
+        (const TArrayI &, const TArrayI &, const TArrayF &, const TArrayI &,
+         Int_t, Int_t , TArrayF &, float &, float &, Int_t &, Float_t &,
+         Int_t &, const TArrayF &, const int); //!
+
+
+    int FindBestSplitGini(const TArrayI &datasort, const TArrayI &datarang,
+                          const TArrayF &hadtrue, const TArrayI &idclass,
+                          Int_t ndstart, Int_t ndend, TArrayF &tclasspop,
+                          float &mean, float &square, Int_t &msplit,
+                          Float_t &decsplit, Int_t &nbest, const TArrayF &winbag,
+                          const int nclass);
+
+    int FindBestSplitSigma(const TArrayI &datasort, const TArrayI &datarang,
+                           const TArrayF &hadtrue, const TArrayI &idclass,
+                           Int_t ndstart, Int_t ndend, TArrayF &tclasspop,
+                           float &mean, float &square, Int_t &msplit,
+                           Float_t &decsplit, Int_t &nbest, const TArrayF &winbag,
+                           const int nclass);
 
     void MoveData(TArrayI &datasort, Int_t ndstart, Int_t ndend,
@@ -48,19 +59,15 @@
                   Int_t nbest, Int_t &ndendl);
 
-    void BuildTree(TArrayI &datasort, const TArrayI &datarang,
-                   const TArrayI &hadtrue,
-                   TArrayI &bestsplit,TArrayI &bestsplitnext,
-                   TArrayF &tclasspop,
-                   const TArrayF &winbag,
-                   Int_t ninbag);
+    void BuildTree(TArrayI &datasort, const TArrayI &datarang, const TArrayF &hadtrue,
+                   const TArrayI &idclass,TArrayI &bestsplit,TArrayI &bestsplitnext,
+                   TArrayF &tclasspop, float &tmean, float &tsquare, const TArrayF &winbag,
+                   Int_t ninbag, const int nclass);
 
 public:
     MRanTree(const char *name=NULL, const char *title=NULL);
+    MRanTree(const MRanTree &tree);
 
     void SetNdSize(Int_t n);
     void SetNumTry(Int_t n);
-    void SetRules(MDataArray *rules);
-
-    MDataArray *GetRules() { return fData;}
 
     Int_t GetNdSize() const { return fNdSize; }
@@ -78,14 +85,15 @@
     Float_t GetGiniDec(Int_t i)  const { return fGiniDec.At(i); }
 
+    void SetClassify(Int_t n){ fClassify=n; }
+
     // functions used in tree growing process
-    void GrowTree(const TMatrix &mhad, const TMatrix &mgam,
-                  const TArrayI &hadtrue, TArrayI &datasort,
-                  const TArrayI &datarang,
-                  TArrayF &tclasspop, TArrayI &jinbag, const TArrayF &winbag);
+    void GrowTree(TMatrix *mat, const TArrayF &hadtrue, const TArrayI &idclass,
+                  TArrayI &datasort, const TArrayI &datarang,TArrayF &tclasspop,
+                  float &mean, float &square, TArrayI &jinbag, const TArrayF &winbag,
+                  const int nclass);
 
     Double_t TreeHad(const TVector &event);
-    Double_t TreeHad(const TMatrixFRow_const &event);
+    Double_t TreeHad(const TMatrixRow &event);
     Double_t TreeHad(const TMatrix &m, Int_t ievt);
-    Double_t TreeHad();
 
     Bool_t AsciiWrite(ostream &out) const;
Index: trunk/MagicSoft/Mars/mranforest/Makefile
===================================================================
--- trunk/MagicSoft/Mars/mranforest/Makefile	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/Makefile	(revision 7396)
@@ -25,6 +25,4 @@
            MRanForest.cc \
            MRanForestGrow.cc \
-           MRanForestCalc.cc \
-           MRanForestFill.cc \
 	   MHRanForest.cc \
 	   MHRanForestGini.cc \
Index: trunk/MagicSoft/Mars/mranforest/RanForestLinkDef.h
===================================================================
--- trunk/MagicSoft/Mars/mranforest/RanForestLinkDef.h	(revision 7395)
+++ trunk/MagicSoft/Mars/mranforest/RanForestLinkDef.h	(revision 7396)
@@ -8,6 +8,4 @@
 #pragma link C++ class MRanForest+;
 #pragma link C++ class MRanForestGrow+;
-#pragma link C++ class MRanForestCalc+;
-#pragma link C++ class MRanForestFill+;    
 
 #pragma link C++ class MHRanForest+;
