source: trunk/MagicSoft/Mars/mranforest/MRanForestCalc.cc@ 8704

/* ======================================================================== *\
! $Name: not supported by cvs2svn $:$Id: MRanForestCalc.cc,v 1.29 2007-08-24 08:33:48 tbretz Exp $
! --------------------------------------------------------------------------
!
! *
! * This file is part of MARS, the MAGIC Analysis and Reconstruction
! * Software. It is distributed to you in the hope that it can be a useful
! * and timesaving tool in analysing Data of imaging Cerenkov telescopes.
! * It is distributed WITHOUT ANY WARRANTY.
! *
! * Permission to use, copy, modify and distribute this software and its
! * documentation for any purpose is hereby granted without fee,
! * provided that the above copyright notice appear in all copies and
! * that both that copyright notice and this permission notice appear
! * in supporting documentation. It is provided "as is" without express
! * or implied warranty.
! *
!
!
!   Author(s): Thomas Hengstebeck 2/2005 <mailto:hengsteb@physik.hu-berlin.de>
!   Author(s): Thomas Bretz  8/2005 <mailto:tbretz@astro.uni-wuerzburg.de>
!
!   Copyright: MAGIC Software Development, 2000-2006
!
!
\* ======================================================================== */

/////////////////////////////////////////////////////////////////////////////
//
// MRanForestCalc
//
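// Task to evaluate one or several trained random forests (MRanForest) for
// each event. The forests are read from the file set with SetFileName()
// and can be grown and stored with Train(). The result of the evaluation,
// optionally transformed by the function set with SetFunction(), is stored
// in an MParameterD container (default name "RanForestOut").
//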
////////////////////////////////////////////////////////////////////////////
#include "MRanForestCalc.h"

#include <TF1.h>
#include <TFile.h>
#include <TGraph.h>
#include <TVector.h>

#include "MHMatrix.h"

#include "MLog.h"
#include "MLogManip.h"

#include "MData.h"
#include "MDataArray.h"

#include "MRanForest.h"
#include "MParameters.h"

#include "MParList.h"
#include "MTaskList.h"
#include "MEvtLoop.h"
#include "MRanForestGrow.h"
#include "MFillH.h"

ClassImp(MRanForestCalc);

using namespace std;

const TString MRanForestCalc::gsDefName  = "MRanForestCalc";
const TString MRanForestCalc::gsDefTitle = "RF for energy estimation";

const TString MRanForestCalc::gsNameOutput   = "RanForestOut";
const TString MRanForestCalc::gsNameEvalFunc = "EvalFunction";

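// --------------------------------------------------------------------------
//
// Default constructor. Sets name and title of the task and initializes the
// forest parameters (number of trees, number of trials and node size) with
// their hard-coded defaults.
//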
MRanForestCalc::MRanForestCalc(const char *name, const char *title)
    : fData(0), fRFOut(0), fTestMatrix(0), fFunc("x"),
    fNumTrees(-1), fNumTry(-1), fNdSize(-1), fNumObsoleteVariables(1),
    fLastDataColumnHasWeights(kFALSE),
    fNameOutput(gsNameOutput), fDebug(kFALSE), fEstimationMode(kMean)
{
    fName  = name  ? name  : gsDefName.Data();
    fTitle = title ? title : gsDefTitle.Data();

    // FIXME:
    fNumTrees = 100; //100
    fNumTry   = 0;   //3   0 means: the estimated best value is calculated in MRanForest
    fNdSize   = 1;   //1
}

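// --------------------------------------------------------------------------
//
// Destructor. Deletes all forests read or trained so far.
//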
MRanForestCalc::~MRanForestCalc()
{
    fEForests.Delete();
}

// --------------------------------------------------------------------------
//
// Set a function which is applied to the output of the random forest
//
Bool_t MRanForestCalc::SetFunction(const char *func)
{
    return !fFunc.SetRule(func);
}

// --------------------------------------------------------------------------
//
// ver=0: One yes/no-classification forest is trained for each bin.
//        The yes/no classification is done using the grid.
// ver=1: One classification forest is trained. The last column contains a
//        value which is turned into a classifier by the RF itself using
//        the grid.
// ver=2: One classification forest is trained. The last column already
//        contains the classifier.
// ver=3: A regression forest is trained. The last column contains the
//        value to be regressed.
//
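// A minimal usage sketch (not from the original code; the file name and the
// choice of target column are only assumptions for illustration): train a
// single regression forest (ver=3) from an MHMatrix whose last column holds
// the quantity to be estimated, e.g. log10 of the true energy:
//
//     MHMatrix matrixtrain;                   // filled beforehand, last column
//                                             // contains the regression target
//     MRanForestCalc calc;
//     calc.SetFileName("rf-energy.root");     // hypothetical output file
//     calc.Train(matrixtrain, TArrayD(), 3);  // grid is unused for ver=3
//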
Int_t MRanForestCalc::Train(const MHMatrix &matrixtrain, const TArrayD &grid, Int_t ver)
{
    gLog.Separator("MRanForestCalc - Train");

    if (!matrixtrain.GetColumns())
    {
        *fLog << err << "ERROR - MHMatrix does not contain rules... abort." << endl;
        return kFALSE;
    }

    const Int_t ncols = matrixtrain.GetM().GetNcols();
    const Int_t nrows = matrixtrain.GetM().GetNrows();
    if (ncols<=0 || nrows<=0)
    {
        *fLog << err << "ERROR - No. of columns or no. of rows of matrixtrain equal 0... abort." << endl;
        return kFALSE;
    }

    // rules (= combination of image parameters) to be used for energy estimation
    TFile fileRF(fFileName, "recreate");
    if (!fileRF.IsOpen())
    {
        *fLog << err << "ERROR - File to store RFs could not be opened... abort." << endl;
        return kFALSE;
    }

    // The number of columns which have to be removed for the training.
    // The last data column may contain weights which also have to be removed.
    const Int_t nobs = fNumObsoleteVariables + (fLastDataColumnHasWeights?1:0); // Number of obsolete columns

    const MDataArray &dcol = *matrixtrain.GetColumns();

    // Make a copy of the rules for accessing the train-data
    MDataArray usedrules;
    for (Int_t i=0; i<ncols; i++)
        if (i<ncols-nobs) // -3 is important!!!
            usedrules.AddEntry(dcol[i].GetRule());
        else
            *fLog << inf << "Skipping " << dcol[i].GetRule() << " for training" << endl;

    // In the case of regression store the rule to be regressed in the
    // last entry of your rules
    MDataArray rules(usedrules);
    rules.AddEntry(ver<3?"Classification.fVal":dcol[ncols-1].GetRule().Data());

    // prepare train-matrix finally used
    TMatrix mat(matrixtrain.GetM());

    // Resize it such that the obsolete columns are removed
    mat.ResizeTo(nrows, ncols-nobs+1);

    if (fDebug)
        gLog.SetNullOutput(kTRUE);

    // In the case one independent RF is trained for each bin (e.g.
    // energy-bin) train all of them
    const Int_t nbins = ver>0 ? 1 : grid.GetSize()-1;
    for (Int_t ie=0; ie<nbins; ie++)
    {
        // In the case weights should be used initialize the
        // corresponding array
        TArrayF weights(nrows);
        if (fLastDataColumnHasWeights)
            for (Int_t j=0; j<nrows; j++)
            {
                weights[j] = matrixtrain.GetM()(j, ncols-nobs);
                if (j%250==0)
                    cout << weights[j] << " ";
            }

        // Setup the matrix such that the last column contains
        // the classifier or the regression target value
        switch (ver)
        {
        case 0: // Replace last column by a classification which is 1 in
                // the case the event belongs to this bin, 0 otherwise
            {
                Int_t irows=0;
                for (Int_t j=0; j<nrows; j++)
                {
                    const Double_t value  = matrixtrain.GetM()(j, ncols-1);
                    const Bool_t   inside = value>grid[ie] && value<=grid[ie+1];

                    mat(j, ncols-nobs) = inside ? 1 : 0;

                    if (inside)
                        irows++;
                }
                if (irows==0)
                    *fLog << warn << "WARNING - Skipping";
                else
                    *fLog << inf << "Training RF for";

                *fLog << " bin " << ie << " (" << grid[ie] << ", " << grid[ie+1] << ") " << irows << "/" << nrows << endl;

                if (irows==0)
                    continue;
            }
            break;

        case 1: // Use last column as classifier or for regression
        case 2:
        case 3:
            for (Int_t j=0; j<nrows; j++)
                mat(j, ncols-nobs) = matrixtrain.GetM()(j, ncols-1);
            break;
        }

        MHMatrix matrix(mat, &rules, "MatrixTrain");

        MParList plist;
        MTaskList tlist;
        plist.AddToList(&tlist);
        plist.AddToList(&matrix);

        MRanForest rf;
        rf.SetNumTrees(fNumTrees);
        rf.SetNumTry(fNumTry);
        rf.SetNdSize(fNdSize);
        rf.SetClassify(ver<3 ? kTRUE : kFALSE);
        if (ver==1)
            rf.SetGrid(grid);
        if (fLastDataColumnHasWeights)
            rf.SetWeights(weights);

        plist.AddToList(&rf);

        MRanForestGrow rfgrow;
        tlist.AddToList(&rfgrow);

        MFillH fillh("MHRanForestGini");
        tlist.AddToList(&fillh);

        MEvtLoop evtloop(fTitle);
        evtloop.SetParList(&plist);
        evtloop.SetDisplay(fDisplay);
        evtloop.SetLogStream(fLog);

        if (!evtloop.Eventloop())
            return kFALSE;

        if (fDebug)
            gLog.SetNullOutput(kFALSE);

        if (ver==0)
        {
            // Calculate bin center
            const Double_t E = (TMath::Log10(grid[ie])+TMath::Log10(grid[ie+1]))/2;

            // save whole forest
            rf.SetUserVal(E);
            rf.SetName(Form("%.10f", E));
        }

        rf.Write();
    }

    // save rules
    usedrules.Write("rules");

    fFunc.Write(gsNameEvalFunc);

    return kTRUE;
}

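// --------------------------------------------------------------------------
//
// Read the trained forests from the file set with SetFileName. Each forest
// found in the file is added to fEForests; its user value (e.g. the center
// of the corresponding energy bin) is decoded from the object name. The
// rules and, if present, the evaluation function are read as well.
//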
Int_t MRanForestCalc::ReadForests(MParList &plist)
{
    TFile fileRF(fFileName, "read");
    if (!fileRF.IsOpen())
    {
        *fLog << err << dbginf << "File containing RFs could not be opened... aborting." << endl;
        return kFALSE;
    }

    fEForests.Delete();

    TIter Next(fileRF.GetListOfKeys());
    TObject *o=0;
    while ((o=Next()))
    {
        MRanForest *forest=0;
        fileRF.GetObject(o->GetName(), forest);
        if (!forest)
            continue;

        forest->SetUserVal(atof(o->GetName()));

        fEForests.Add(forest);
    }

    // Maybe fEForests[0].fRules could be used instead?
    if (fData->Read("rules")<=0)
    {
        *fLog << err << "ERROR - Reading 'rules' from file " << fFileName << endl;
        return kFALSE;
    }

    if (fileRF.GetListOfKeys()->FindObject(gsNameEvalFunc))
    {
        if (fFunc.Read(gsNameEvalFunc)<=0)
        {
            *fLog << err << "ERROR - Reading '" << gsNameEvalFunc << "' from file " << fFileName << endl;
            return kFALSE;
        }

        *fLog << inf << "Evaluation function found in file: " << fFunc.GetRule() << endl;
    }

    return kTRUE;
}

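// --------------------------------------------------------------------------
//
// Find or create the output container (MParameterD) and the MDataArray for
// the rules, read the forests from the file and preprocess the evaluation
// function and, unless a test matrix is set, the data array.
//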
Int_t MRanForestCalc::PreProcess(MParList *plist)
{
    fRFOut = (MParameterD*)plist->FindCreateObj("MParameterD", fNameOutput);
    if (!fRFOut)
        return kFALSE;

    fData = (MDataArray*)plist->FindCreateObj("MDataArray");
    if (!fData)
        return kFALSE;

    if (!ReadForests(*plist))
    {
        *fLog << err << "Reading RFs failed... aborting." << endl;
        return kFALSE;
    }

    *fLog << inf << "RF read from " << fFileName << endl;

    if (!fFunc.PreProcess(plist))
    {
        *fLog << err << "PreProcessing of evaluation function failed... aborting." << endl;
        return kFALSE;
    }

    if (fTestMatrix)
        return kTRUE;

    fData->Print();

    if (!fData->PreProcess(plist))
    {
        *fLog << err << "PreProcessing of the MDataArray failed... aborting." << endl;
        return kFALSE;
    }

    return kTRUE;
}

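// --------------------------------------------------------------------------
//
// Fill the event data into a vector (from fTestMatrix if set, otherwise
// from fData) and evaluate the forests. If only one forest is available
// its output is returned directly. Otherwise the outputs of all forests
// are combined according to fEstimationMode: the output-weighted mean of
// the user values (kMean), the user value of the forest with the highest
// output (kMaximum), or the mean of a Gaussian fitted to output versus
// user value (kFit).
//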
Double_t MRanForestCalc::Eval() const
{
    TVector event;
    if (fTestMatrix)
        *fTestMatrix >> event;
    else
        *fData >> event;

    // --------------- Single forest -------------------
    if (fEForests.GetEntriesFast()==1)
    {
        MRanForest *rf = static_cast<MRanForest*>(fEForests.UncheckedAt(0));
        return rf->CalcHadroness(event);
    }

    // --------------- Multiple forests (one per bin) -------------------
    static TF1 f1("f1", "gaus");

    Double_t sume = 0;
    Double_t sumh = 0;
    Double_t maxh = 0;
    Double_t maxe = 0;

    Double_t max = -1e10;
    Double_t min =  1e10;

    TIter Next(&fEForests);
    MRanForest *rf = 0;

    TGraph g;
    while ((rf=(MRanForest*)Next()))
    {
        const Double_t h = rf->CalcHadroness(event);
        const Double_t e = rf->GetUserVal();

        g.SetPoint(g.GetN(), e, h);

        sume += e*h;
        sumh += h;

        if (h>maxh)
        {
            maxh = h;
            maxe = e;
        }
        if (e>max)
            max = e;
        if (e<min)
            min = e;
    }

    switch (fEstimationMode)
    {
    case kMean:
        return sume/sumh;
    case kMaximum:
        return maxe;
    case kFit:
        f1.SetParameter(0, maxh);
        f1.SetParameter(1, maxe);
        f1.SetParameter(2, 0.125);
        g.Fit(&f1, "Q0N");
        return f1.GetParameter(1);
    }

    return 0;
}

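// --------------------------------------------------------------------------
//
// Evaluate the forests for the current event, apply the evaluation
// function and store the result in the output container.
//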
Int_t MRanForestCalc::Process()
{
    const Double_t val = Eval();

    fRFOut->SetVal(fFunc.Eval(val));
    fRFOut->SetReadyToSave();

    return kTRUE;
}

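// --------------------------------------------------------------------------
//
// Print the setup of the task: the type of forest, the file name and the
// name of the output container.
//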
void MRanForestCalc::Print(Option_t *o) const
{
    *fLog << all;
    *fLog << GetDescriptor() << ":" << endl;
    *fLog << " - Forest ";
    switch (fEForests.GetEntries())
    {
    case 0:  *fLog << "not yet initialized." << endl; break;
    case 1:  *fLog << "is a single tree forest." << endl; break;
    default: *fLog << "is a multi tree forest." << endl; break;
    }
    /*
    *fLog << " - Trees:     " << fNumTrees << endl;
    *fLog << " - Tries:     " << fNumTry   << endl;
    *fLog << " - Node Size: " << fNdSize   << endl;
    */
    *fLog << " - FileName:   " << fFileName   << endl;
    *fLog << " - NameOutput: " << fNameOutput << endl;
}

// --------------------------------------------------------------------------
//
// Read the setup of the task from a resource file (TEnv). The supported
// resources are FileName, Debug, NameOutput and EstimationMode (one of
// "mean", "maximum" or "fit").
//
Int_t MRanForestCalc::ReadEnv(const TEnv &env, TString prefix, Bool_t print)
{
    Bool_t rc = kFALSE;
    if (IsEnvDefined(env, prefix, "FileName", print))
    {
        rc = kTRUE;
        SetFileName(GetEnvValue(env, prefix, "FileName", fFileName));
    }
    if (IsEnvDefined(env, prefix, "Debug", print))
    {
        rc = kTRUE;
        SetDebug(GetEnvValue(env, prefix, "Debug", fDebug));
    }
    if (IsEnvDefined(env, prefix, "NameOutput", print))
    {
        rc = kTRUE;
        SetNameOutput(GetEnvValue(env, prefix, "NameOutput", fNameOutput));
    }
    if (IsEnvDefined(env, prefix, "EstimationMode", print))
    {
        TString txt = GetEnvValue(env, prefix, "EstimationMode", "");
        txt = txt.Strip(TString::kBoth);
        txt.ToLower();
        if (txt==(TString)"mean")
            fEstimationMode = kMean;
        if (txt==(TString)"maximum")
            fEstimationMode = kMaximum;
        if (txt==(TString)"fit")
            fEstimationMode = kFit;
        rc = kTRUE;
    }
    return rc;
}
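
// Example resource-file entries read by ReadEnv (a sketch only; the prefix
// is normally the name given to the task, assumed here to be the default
// "MRanForestCalc", and the values are made up for illustration):
//
//     MRanForestCalc.FileName:       rf-energy.root
//     MRanForestCalc.Debug:          no
//     MRanForestCalc.NameOutput:     RanForestOut
//     MRanForestCalc.EstimationMode: fit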