source: trunk/MagicSoft/Mars/mranforest/MRanTree.cc@ 7730

/* ======================================================================== *\
!
! *
! * This file is part of MARS, the MAGIC Analysis and Reconstruction
! * Software. It is distributed to you in the hope that it can be a useful
! * and timesaving tool in analysing Data of imaging Cerenkov telescopes.
! * It is distributed WITHOUT ANY WARRANTY.
! *
! * Permission to use, copy, modify and distribute this software and its
! * documentation for any purpose is hereby granted without fee,
! * provided that the above copyright notice appear in all copies and
! * that both that copyright notice and this permission notice appear
! * in supporting documentation. It is provided "as is" without express
! * or implied warranty.
! *
!
!
!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@physik.hu-berlin.de>
!
!   Copyright: MAGIC Software Development, 2000-2005
!
!
\* ======================================================================== */

/////////////////////////////////////////////////////////////////////////////
//
// MRanTree
//
// Parameter container for the structure of a single tree
//
/////////////////////////////////////////////////////////////////////////////
#include "MRanTree.h"

#include <iostream>

#include <TVector.h>
#include <TMatrix.h>
#include <TRandom.h>

#include "MArrayI.h"
#include "MArrayF.h"

#include "MMath.h"

#include "MLog.h"
#include "MLogManip.h"

ClassImp(MRanTree);

using namespace std;


// --------------------------------------------------------------------------
// Default constructor.
//
MRanTree::MRanTree(const char *name, const char *title):fClassify(kTRUE),fNdSize(0), fNumTry(3)
{
    fName  = name  ? name  : "MRanTree";
    fTitle = title ? title : "Storage container for structure of a single tree";
}

// --------------------------------------------------------------------------
// Copy constructor
//
MRanTree::MRanTree(const MRanTree &tree)
{
    fName  = tree.fName;
    fTitle = tree.fTitle;

    fClassify = tree.fClassify;
    fNdSize   = tree.fNdSize;
    fNumTry   = tree.fNumTry;

    fNumNodes    = tree.fNumNodes;
    fNumEndNodes = tree.fNumEndNodes;

    fBestVar   = tree.fBestVar;
    fTreeMap1  = tree.fTreeMap1;
    fTreeMap2  = tree.fTreeMap2;
    fBestSplit = tree.fBestSplit;
    fGiniDec   = tree.fGiniDec;
}

void MRanTree::SetNdSize(Int_t n)
{
    // Threshold node size of the terminal nodes, i.e. the training data is
    // split until there is only pure data in the subsets (=terminal nodes)
    // or the subset size is <= n

    fNdSize=TMath::Max(1,n); // at least 1 event per node
}

void MRanTree::SetNumTry(Int_t n)
{
    // Number of trials in the random split selection:
    // choose at least 1 variable to split in

    fNumTry=TMath::Max(1,n);
}
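
// --------------------------------------------------------------------------
// Editor's sketch (not part of the original file): minimal illustration of
// how the two setters above are meant to be used before a tree is grown.
// The function name SketchConfigureTree and the chosen values are purely
// hypothetical; only SetNdSize() and SetNumTry() are taken from this class.
//
static void SketchConfigureTree(MRanTree &tree)
{
    tree.SetNdSize(5);  // nodes with <=5 events become terminal nodes
    tree.SetNumTry(3);  // test 3 randomly chosen variables at each split
}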

void MRanTree::GrowTree(TMatrix *mat, const MArrayF &hadtrue, const MArrayI &idclass,
                        MArrayI &datasort, const MArrayI &datarang, const MArrayF &tclasspop,
                        const Float_t &mean, const Float_t &square, const MArrayI &jinbag, const MArrayF &winbag,
                        const int nclass)
{
    // The arrays have to be initialized with a generous size, so the total
    // number of nodes (nrnodes) is estimated for the worst case
    const Int_t numdim =mat->GetNcols();
    const Int_t numdata=winbag.GetSize();
    const Int_t nrnodes=2*numdata+1;

    // number of events in the bootstrap sample
    Int_t ninbag=0;
    for (Int_t n=0;n<numdata;n++) if(jinbag[n]==1) ninbag++;

    MArrayI bestsplit(nrnodes);
    MArrayI bestsplitnext(nrnodes);

    fBestVar.Set(nrnodes);   fBestVar.Reset();
    fTreeMap1.Set(nrnodes);  fTreeMap1.Reset();
    fTreeMap2.Set(nrnodes);  fTreeMap2.Reset();
    fBestSplit.Set(nrnodes); fBestSplit.Reset();
    fGiniDec.Set(numdim);    fGiniDec.Reset();

    if(fClassify)
        FindBestSplit=&MRanTree::FindBestSplitGini;
    else
        FindBestSplit=&MRanTree::FindBestSplitSigma;

    // tree growing
    BuildTree(datasort,datarang,hadtrue,idclass,bestsplit, bestsplitnext,
              tclasspop,mean,square,winbag,ninbag,nclass);

    // post processing, determine the cut (or split) values fBestSplit
    for(Int_t k=0; k<nrnodes; k++)
    {
        if (GetNodeStatus(k)==-1)
            continue;

        const Int_t &bsp =bestsplit[k];
        const Int_t &bspn=bestsplitnext[k];
        const Int_t &msp =fBestVar[k];

        fBestSplit[k] = ((*mat)(bsp, msp)+(*mat)(bspn,msp))/2;
    }

    // resize the arrays to save memory
    fBestVar.Set(fNumNodes);
    fTreeMap1.Set(fNumNodes);
    fTreeMap2.Set(fNumNodes);
    fBestSplit.Set(fNumNodes);
}
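
// --------------------------------------------------------------------------
// Editor's sketch (illustration only, not part of the original code): the
// post-processing loop above places the cut half-way between the value of
// the split variable at the best-split event and the next larger value in
// the sorted data. The helper name and the example values are hypothetical.
//
static Double_t SketchMidpointCut(Double_t valAtBest, Double_t valNextLarger)
{
    // e.g. valAtBest=0.23, valNextLarger=0.27  ->  the cut is set at 0.25
    return (valAtBest+valNextLarger)/2;
}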

int MRanTree::FindBestSplitGini(const MArrayI &datasort,const MArrayI &datarang,
                                const MArrayF &hadtrue,const MArrayI &idclass,
                                Int_t ndstart,Int_t ndend, const MArrayF &tclasspop,
                                const Float_t &mean, const Float_t &square, Int_t &msplit,
                                Float_t &decsplit,Int_t &nbest, const MArrayF &winbag,
                                const int nclass)
{
    const Int_t nrnodes = fBestSplit.GetSize();
    const Int_t numdata = (nrnodes-1)/2;
    const Int_t mdim    = fGiniDec.GetSize();

    // For the best split, msplit is the index of the variable (e.g. Hillas
    // parameter, zenith angle, ...) to split on. decsplit is the decrease in
    // impurity measured by the Gini index. nbest is the case number (position
    // in the sorted data) of the value of msplit split on; the next larger
    // value of msplit is found at position nbest+1.

    Int_t nbestvar=0;

    // compute the initial values of the numerator and denominator of the
    // Gini index: Gini index = pno/pdo
    Double_t pno=0;
    Double_t pdo=0;

    // tclasspop: sum of weights for the events in each class
    for (Int_t j=0; j<nclass; j++) // loop over the number of classes to classify
    {
        pno+=tclasspop[j]*tclasspop[j];
        pdo+=tclasspop[j];
    }

    const Double_t crit0=pno/pdo; // weighted mean of the weights

    // start the main loop through the variables to find the best split
    // (Gini index as criterion crit)

    Double_t critmax=-FLT_MAX;

    // random split selection, number of trials = fNumTry
    for (Int_t mt=0; mt<fNumTry; mt++) // we could try ALL variables???
    {
        const Int_t mvar= gRandom->Integer(mdim);
        const Int_t mn  = mvar*numdata;

        // Gini index = rrn/rrd+rln/rld
        Double_t rrn=pno;
        Double_t rrd=pdo;
        Double_t rln=0;
        Double_t rld=0;

        MArrayF wl(nclass);    // left node  //nclass
        MArrayF wr(tclasspop); // right node //nclass

        Double_t critvar=-FLT_MAX;
        for(Int_t nsp=ndstart;nsp<=ndend-1;nsp++)
        {
            const Int_t   &nc = datasort[mn+nsp];
            const Int_t   &k  = idclass[nc];
            const Float_t &u  = winbag[nc];

            // do classification, Gini index as split rule
            rln   +=u*(2*wl[k]+u); // += u*(wl[k]{i-1} + wl[k]{i-1}+u{i})
            rld   +=u;             // total sum of weights left of the cut
            wl[k] +=u;             // sum of weights left of the cut for class k

            rrn   -=u*(2*wr[k]-u); // -= u*(wr[k]{i-1} + wr[k]{i-1}-u{i})
            // rr0=0; rr0+=u*2*tclasspop[k]
            // rrn = pno - rr0 + rln
            rrd   -=u;             // total sum of weights right of the cut
            wr[k] -=u;             // sum of weights right of the cut for class k

            // REPLACE BY?
            // rr0    = 0
            // rr0   += u*2*tclasspop[k]
            // rrn    = pno - rr0 + rln
            // rrd    = pdo - rld
            // wr[k]  = tclasspop[k] - wl[k]

            // crit = (rln*(pdo - rld + 1) + pno - rr0) / rld*(pdo - rld)

            /*
             if (k==background)
                 continue;
             crit = TMath::Max(MMath::SignificanceLiMa(rld, rld-wl[k]),
                               MMath::SignificanceLiMa(rrd, rrd-wr[k]))
             */

            // This condition is in fact a == (> cannot happen at all)
            // This is because we cannot set the cut between two identical values
            //if (datarang[mn+datasort[mn+nsp]]>=datarang[mn+datasort[mn+nsp+1]])
            if (datarang[mn+nc]>=datarang[mn+datasort[mn+nsp+1]])
                continue;

            // If crit starts to become pretty large do WHAT???
            //if (TMath::Min(rrd,rld)<=1.0e-5) // FIXME: CHECKIT FOR WEIGHTS!
            //    continue;

            const Double_t crit=(rln/rld)+(rrn/rrd);
            if (!TMath::Finite(crit))
                continue;

            // Search for the highest value of crit
            if (crit<=critvar) continue;

            // store the highest crit value and the corresponding event to cut at
            nbestvar=nsp;
            critvar=crit;
        }

        if (critvar<=critmax) continue;

        msplit=mvar;    // variable in which to split
        nbest=nbestvar; // event at which the best split was found
        critmax=critvar;
    }

    // crit0 = MMath::SignificanceLiMa(pdo, pdo-tclasspop[0])
    // mean increase of sensitivity
    // decsplit = sqrt(critmax/crit0)
    decsplit=critmax-crit0;

    return critmax<-1.0e10 ? 1 : 0;
}
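
// --------------------------------------------------------------------------
// Editor's sketch (illustration only, not part of the original code): the
// quantity maximized above is
//
//   crit = (sum_k wl[k]^2)/WL + (sum_k wr[k]^2)/WR
//
// where wl[k] and wr[k] are the per-class weight sums left and right of the
// cut and WL, WR their totals. The incremental updates of rln, rld, rrn and
// rrd in the loop keep exactly these sums up to date. The helper below
// (name and signature hypothetical) evaluates the same criterion directly:
// e.g. wl={3,0}, wr={1,4} gives 9/3+17/5=6.4, while the less pure split
// wl={2,2}, wr={2,2} of the same total weight gives only 8/4+8/4=4.
//
static Double_t SketchGiniCriterion(const Double_t *wl, const Double_t *wr, Int_t nclass)
{
    Double_t sl=0, sr=0, wltot=0, wrtot=0;
    for (Int_t k=0; k<nclass; k++)
    {
        sl    += wl[k]*wl[k]; // numerator of the left term
        sr    += wr[k]*wr[k]; // numerator of the right term
        wltot += wl[k];       // total weight left of the cut
        wrtot += wr[k];       // total weight right of the cut
    }
    return sl/wltot + sr/wrtot; // the larger, the purer the split
}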

int MRanTree::FindBestSplitSigma(const MArrayI &datasort,const MArrayI &datarang,
                                 const MArrayF &hadtrue, const MArrayI &idclass,
                                 Int_t ndstart,Int_t ndend, const MArrayF &tclasspop,
                                 const Float_t &mean, const Float_t &square, Int_t &msplit,
                                 Float_t &decsplit,Int_t &nbest, const MArrayF &winbag,
                                 const int nclass)
{
    const Int_t nrnodes = fBestSplit.GetSize();
    const Int_t numdata = (nrnodes-1)/2;
    const Int_t mdim    = fGiniDec.GetSize();

    // For the best split, msplit is the index of the variable (e.g. Hillas
    // parameter, zenith angle, ...) to split on. decsplit is the decrease in
    // impurity measured by the split criterion (here a variance-based index).
    // nbest is the case number (position in the sorted data) at which the
    // split is placed.

    Int_t nbestvar=0;

    // compute the initial values of the numerator and denominator of the split index

    // resolution
    //Double_t pno=-(tclasspop[0]*square-mean*mean)*tclasspop[0];
    //Double_t pdo= (tclasspop[0]-1.)*mean*mean;

    // n*resolution
    //Double_t pno=-(tclasspop[0]*square-mean*mean)*tclasspop[0];
    //Double_t pdo= mean*mean;

    // variance
    //Double_t pno=-(square-mean*mean/tclasspop[0]);
    //Double_t pdo= (tclasspop[0]-1.);

    // n*variance
    Double_t pno= (square-mean*mean/tclasspop[0]);
    Double_t pdo= 1.;

    // 1./(n*variance)
    //Double_t pno= 1.;
    //Double_t pdo= (square-mean*mean/tclasspop[0]);

    const Double_t crit0=pno/pdo;

    // start the main loop through the variables to find the best split

    Double_t critmin=FLT_MAX;

    // random split selection, number of trials = fNumTry
    for (Int_t mt=0; mt<fNumTry; mt++)
    {
        const Int_t mvar= gRandom->Integer(mdim);
        const Int_t mn  = mvar*numdata;

        Double_t esumr =mean;
        Double_t e2sumr=square;
        Double_t esuml =0;
        Double_t e2suml=0;

        float wl=0.;           // left node
        float wr=tclasspop[0]; // right node

        Double_t critvar=critmin;
        for(Int_t nsp=ndstart;nsp<=ndend-1;nsp++)
        {
            const Int_t   &nc=datasort[mn+nsp];
            const Float_t &f =hadtrue[nc];
            const Float_t &u =winbag[nc];

            e2suml+=u*f*f;
            esuml +=u*f;
            wl    +=u;

            //-------------------------------------------
            // resolution
            //const Double_t rln=(wl*e2suml-esuml*esuml)*wl;
            //const Double_t rld=(wl-1.)*esuml*esuml;

            // resolution times n
            //const Double_t rln=(wl*e2suml-esuml*esuml)*wl;
            //const Double_t rld=esuml*esuml;

            // sigma
            //const Double_t rln=(e2suml-esuml*esuml/wl);
            //const Double_t rld=(wl-1.);

            // sigma times n
            const Double_t rln=(e2suml-esuml*esuml/wl);
            const Double_t rld=1.;

            // 1./(n*variance)
            //const Double_t rln=1.;
            //const Double_t rld=(e2suml-esuml*esuml/wl);
            //-------------------------------------------

            // REPLACE BY???
            e2sumr-=u*f*f; // e2sumr = square       - e2suml
            esumr -=u*f;   // esumr  = mean         - esuml
            wr    -=u;     // wr     = tclasspop[0] - wl

            //-------------------------------------------
            // resolution
            //const Double_t rrn=(wr*e2sumr-esumr*esumr)*wr;
            //const Double_t rrd=(wr-1.)*esumr*esumr;

            // resolution times n
            //const Double_t rrn=(wr*e2sumr-esumr*esumr)*wr;
            //const Double_t rrd=esumr*esumr;

            // sigma
            //const Double_t rrn=(e2sumr-esumr*esumr/wr);
            //const Double_t rrd=(wr-1.);

            // sigma times n
            const Double_t rrn=(e2sumr-esumr*esumr/wr);
            const Double_t rrd=1.;

            // 1./(n*variance)
            //const Double_t rrn=1.;
            //const Double_t rrd=(e2sumr-esumr*esumr/wr);
            //-------------------------------------------

            if (datarang[mn+nc]>=datarang[mn+datasort[mn+nsp+1]])
                continue;

            //if (TMath::Min(rrd,rld)<=1.0e-5)
            //    continue;

            const Double_t crit=(rln/rld)+(rrn/rrd);
            if (!TMath::Finite(crit))
                continue;

            if (crit>=critvar) continue;

            nbestvar=nsp;
            critvar=crit;
        }

        if (critvar>=critmin) continue;

        msplit=mvar;
        nbest=nbestvar;
        critmin=critvar;
    }

    decsplit=crit0-critmin;

    //return critmin>1.0e20 ? 1 : 0;
    return decsplit<0 ? 1 : 0;
}
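
// --------------------------------------------------------------------------
// Editor's sketch (illustration only, not part of the original code): with
// the "sigma times n" variant that is active above, the quantity minimized
// is the sum of the weighted sums of squared deviations in the two children,
//
//   crit = (E2L - EL*EL/WL) + (E2R - ER*ER/WR)
//
// where EL, E2L and WL are the weighted sum, weighted sum of squares and
// total weight of hadtrue left of the cut (ER, E2R, WR analogously on the
// right). The helper below (hypothetical name) computes one such term from
// scratch; the loop above obtains the same numbers incrementally.
//
static Double_t SketchWeightedSumOfSquares(const Float_t *val, const Float_t *w, Int_t n)
{
    Double_t esum=0, e2sum=0, wtot=0;
    for (Int_t i=0; i<n; i++)
    {
        esum  += w[i]*val[i];        // weighted sum
        e2sum += w[i]*val[i]*val[i]; // weighted sum of squares
        wtot  += w[i];               // total weight
    }
    return e2sum - esum*esum/wtot;   // = wtot * weighted variance
}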

void MRanTree::MoveData(MArrayI &datasort,Int_t ndstart, Int_t ndend,
                        MArrayI &idmove,MArrayI &ncase,Int_t msplit,
                        Int_t nbest,Int_t &ndendl)
{
    // This is the heart of the tree construction in BuildTree. Based on the
    // best split, the data in the part of datasort corresponding to the
    // current node is moved to the left if it belongs to the left child and
    // to the right if it belongs to the right child node.
    const Int_t numdata = ncase.GetSize();
    const Int_t mdim    = fGiniDec.GetSize();

    MArrayI tdatasort(numdata);

    // compute idmove = indicator of the case numbers going left
    for (Int_t nsp=ndstart;nsp<=ndend;nsp++)
    {
        const Int_t &nc=datasort[msplit*numdata+nsp];
        idmove[nc]= nsp<=nbest?1:0;
    }
    ndendl=nbest;

    // shift the case numbers right and left for the numerical variables
    for(Int_t msh=0;msh<mdim;msh++)
    {
        Int_t k=ndstart-1;
        for (Int_t n=ndstart;n<=ndend;n++)
        {
            const Int_t &ih=datasort[msh*numdata+n];
            if (idmove[ih]==1)
                tdatasort[++k]=datasort[msh*numdata+n];
        }

        for (Int_t n=ndstart;n<=ndend;n++)
        {
            const Int_t &ih=datasort[msh*numdata+n];
            if (idmove[ih]==0)
                tdatasort[++k]=datasort[msh*numdata+n];
        }

        for(Int_t m=ndstart;m<=ndend;m++)
            datasort[msh*numdata+m]=tdatasort[m];
    }

    // compute the case numbers for the right and left nodes
    for(Int_t n=ndstart;n<=ndend;n++)
        ncase[n]=datasort[msplit*numdata+n];
}
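
// --------------------------------------------------------------------------
// Editor's sketch (illustration only, not part of the original code): for
// each variable, the two passes above perform a stable partition - cases
// flagged as going left keep their relative order and are moved to the front
// of the node's range, the others follow. The standalone helper below
// (hypothetical name, plain arrays) mirrors that inner loop.
//
static void SketchStablePartition(Int_t *idx, Int_t n, const Int_t *goesLeft, Int_t *tmp)
{
    Int_t k=0;
    for (Int_t i=0; i<n; i++) // first pass: cases going to the left child
        if (goesLeft[idx[i]]==1)
            tmp[k++]=idx[i];
    for (Int_t i=0; i<n; i++) // second pass: cases going to the right child
        if (goesLeft[idx[i]]==0)
            tmp[k++]=idx[i];
    for (Int_t i=0; i<n; i++) // copy back; the relative order is preserved
        idx[i]=tmp[i];
}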

void MRanTree::BuildTree(MArrayI &datasort,const MArrayI &datarang, const MArrayF &hadtrue,
                         const MArrayI &idclass, MArrayI &bestsplit, MArrayI &bestsplitnext,
                         const MArrayF &tclasspop, const Float_t &tmean, const Float_t &tsquare, const MArrayF &winbag,
                         Int_t ninbag, const int nclass)
{
    // BuildTree consists of repeated calls to two functions, FindBestSplit and MoveData.
    // FindBestSplit does just that - it finds the best split of the current node.
    // MoveData moves the data in the split node right and left so that the data
    // corresponding to each child node is contiguous.
    //
    // BuildTree bookkeeping:
    // ncur is the total number of nodes to date. nodestatus[k]=1 if the k-th node has been
    // split. nodestatus[k]=2 if the node exists but has not yet been split, and =-1 if the
    // node is terminal. A node is terminal if its size is below a threshold value, or if it
    // contains only one class, or if all the data values are equal. If the current node k
    // is split, then its children are numbered ncur+1 (left) and ncur+2 (right), ncur
    // increases to ncur+2 and the next node to be split is numbered k+1. When no more nodes
    // can be split, BuildTree returns.
    const Int_t mdim    = fGiniDec.GetSize();
    const Int_t nrnodes = fBestSplit.GetSize();
    const Int_t numdata = (nrnodes-1)/2;

    MArrayI nodepop(nrnodes);
    MArrayI nodestart(nrnodes);
    MArrayI parent(nrnodes);

    MArrayI ncase(numdata);
    MArrayI idmove(numdata);
    MArrayI iv(mdim);

    MArrayF classpop(nrnodes*nclass); //nclass
    MArrayI nodestatus(nrnodes);

    for (Int_t j=0;j<nclass;j++)
        classpop[j*nrnodes+0]=tclasspop[j];

    MArrayF mean(nrnodes);
    MArrayF square(nrnodes);
    MArrayF lclasspop(tclasspop);

    mean[0]=tmean;
    square[0]=tsquare;

    Int_t ncur=0;
    nodepop[0]=ninbag;
    nodestatus[0]=2;

    // start the main loop
    for (Int_t kbuild=0; kbuild<nrnodes; kbuild++)
    {
        if (kbuild>ncur) break;
        if (nodestatus[kbuild]!=2) continue;

        // initialize for the next call to FindBestSplit
        const Int_t ndstart=nodestart[kbuild];
        const Int_t ndend=ndstart+nodepop[kbuild]-1;

        for (Int_t j=0;j<nclass;j++)
            lclasspop[j]=classpop[j*nrnodes+kbuild];

        Int_t msplit, nbest;
        Float_t decsplit=0;

        if ((this->*FindBestSplit)(datasort,datarang,hadtrue,idclass,ndstart,
                                   ndend, lclasspop,mean[kbuild],square[kbuild],msplit,decsplit,
                                   nbest,winbag,nclass))
        {
            nodestatus[kbuild]=-1;
            continue;
        }

        fBestVar[kbuild]=msplit;
        fGiniDec[msplit]+=decsplit;

        bestsplit[kbuild]=datasort[msplit*numdata+nbest];
        bestsplitnext[kbuild]=datasort[msplit*numdata+nbest+1];

        Int_t ndendl;
        MoveData(datasort,ndstart,ndend,idmove,ncase,
                 msplit,nbest,ndendl);

        // left node no. = ncur+1, right node no. = ncur+2
        nodepop[ncur+1]=ndendl-ndstart+1;
        nodepop[ncur+2]=ndend-ndendl;
        nodestart[ncur+1]=ndstart;
        nodestart[ncur+2]=ndendl+1;

        // find the class populations in both nodes
        for (Int_t n=ndstart;n<=ndendl;n++)
        {
            const Int_t &nc=ncase[n];
            const int j=idclass[nc];

            // statistics left of the cut
            mean[ncur+1]  +=hadtrue[nc]*winbag[nc];
            square[ncur+1]+=hadtrue[nc]*hadtrue[nc]*winbag[nc];

            // sum of weights left of the cut
            classpop[j*nrnodes+ncur+1]+=winbag[nc];
        }

        for (Int_t n=ndendl+1;n<=ndend;n++)
        {
            const Int_t &nc=ncase[n];
            const int j=idclass[nc];

            // statistics right of the cut
            mean[ncur+2]  +=hadtrue[nc]*winbag[nc];
            square[ncur+2]+=hadtrue[nc]*hadtrue[nc]*winbag[nc];

            // sum of weights right of the cut
            classpop[j*nrnodes+ncur+2]+=winbag[nc];
        }

        // check on nodestatus
        nodestatus[ncur+1]=2;
        nodestatus[ncur+2]=2;
        if (nodepop[ncur+1]<=fNdSize) nodestatus[ncur+1]=-1;
        if (nodepop[ncur+2]<=fNdSize) nodestatus[ncur+2]=-1;

        Double_t popt1=0;
        Double_t popt2=0;
        for (Int_t j=0;j<nclass;j++)
        {
            popt1+=classpop[j*nrnodes+ncur+1];
            popt2+=classpop[j*nrnodes+ncur+2];
        }

        if(fClassify)
        {
            // check if only members of one class are in the node
            for (Int_t j=0;j<nclass;j++)
            {
                if (classpop[j*nrnodes+ncur+1]==popt1) nodestatus[ncur+1]=-1;
                if (classpop[j*nrnodes+ncur+2]==popt2) nodestatus[ncur+2]=-1;
            }
        }

        fTreeMap1[kbuild]=ncur+1;
        fTreeMap2[kbuild]=ncur+2;
        parent[ncur+1]=kbuild;
        parent[ncur+2]=kbuild;
        nodestatus[kbuild]=1;
        ncur+=2;
        if (ncur>=nrnodes) break;
    }

    // determine the number of nodes
    fNumNodes=nrnodes;
    for (Int_t k=nrnodes-1;k>=0;k--)
    {
        if (nodestatus[k]==0) fNumNodes-=1;
        if (nodestatus[k]==2) nodestatus[k]=-1;
    }

    fNumEndNodes=0;
    for (Int_t kn=0;kn<fNumNodes;kn++)
        if(nodestatus[kn]==-1)
        {
            fNumEndNodes++;

            Double_t pp=0;
            for (Int_t j=0;j<nclass;j++)
            {
                if(classpop[j*nrnodes+kn]>pp)
                {
                    // class + status of node kn coded into fBestVar[kn]
                    fBestVar[kn]=j-nclass;
                    pp=classpop[j*nrnodes+kn];
                }
            }

            float sum=0;
            for(int i=0;i<nclass;i++) sum+=classpop[i*nrnodes+kn];

            fBestSplit[kn]=mean[kn]/sum;
        }
}
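
// --------------------------------------------------------------------------
// Editor's note (illustration only, not part of the original code): how the
// bookkeeping above ends up in the parallel arrays. For a toy tree in which
// the root (node 0) is split on variable 2 and both children are terminal
// (assuming nclass=2):
//
//   k   fBestVar[k]   fTreeMap1[k]   fTreeMap2[k]   meaning
//   0        2             1              2         split node, cut on variable 2
//   1       -2             0              0         terminal node, class 0 (=-2+nclass)
//   2       -1             0              0         terminal node, class 1 (=-1+nclass)
//
// A negative fBestVar marks a terminal node and encodes its majority class as
// fBestVar+nclass; fBestSplit of a terminal node holds the weighted mean of
// hadtrue instead of a cut value. The hypothetical helper below decodes this.
//
static void SketchDecodeNode(Int_t bestVar, Int_t nclass, Int_t &status, Int_t &cls)
{
    status = bestVar<0 ? -1 : 1;              // -1: terminal node, +1: split node
    cls    = bestVar<0 ? bestVar+nclass : -1; // majority class, -1 if not terminal
}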

Double_t MRanTree::TreeHad(const Float_t *evt)
{
    // To optimize the storage space, the node status and the node class
    // are coded into fBestVar:
    //  status of node kt = TMath::Sign(1,fBestVar[kt])
    //  class of node kt  = fBestVar[kt]+2 (class defined by the larger
    //                      node population, actually not used)
    //  hadronness assigned to node kt = fBestSplit[kt]

    // To get rid of the range check of the ROOT array classes
    const Float_t *split = fBestSplit.GetArray();
    const Int_t   *map1  = fTreeMap1.GetArray();
    const Int_t   *map2  = fTreeMap2.GetArray();
    const Int_t   *best  = fBestVar.GetArray();

    Int_t kt=0;
    for (Int_t k=0; k<fNumNodes; k++)
    {
        if (best[kt]<0)
            break;

        const Int_t m=best[kt];
        kt = evt[m]<=split[kt] ? map1[kt] : map2[kt];
    }

    return split[kt];
}

Double_t MRanTree::TreeHad(const TVector &event)
{
    return TreeHad(event.GetMatrixArray());
}

Double_t MRanTree::TreeHad(const TMatrixRow &event)
{
    return TreeHad(event.GetPtr());
}

Double_t MRanTree::TreeHad(const TMatrix &m, Int_t ievt)
{
#if ROOT_VERSION_CODE < ROOT_VERSION(4,00,8)
    return TreeHad(TMatrixRow(m, ievt));
#else
    return TreeHad(TMatrixFRow_const(m, ievt));
#endif
}
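
// --------------------------------------------------------------------------
// Editor's sketch (illustration only, not part of the original code):
// applying a grown tree to one event. The event array must contain the
// variables in the same column order as the matrix used for training; the
// function name and the values below are purely hypothetical.
//
static Double_t SketchClassifyEvent(MRanTree &tree)
{
    const Float_t evt[3] = { 0.23f, 45.0f, 1.2f }; // e.g. Hillas parameter, zenith angle, ...
    return tree.TreeHad(evt);                      // hadronness of the terminal node reached
}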

Bool_t MRanTree::AsciiWrite(ostream &out) const
{
    TString str;
    Int_t k;

    out.width(5); out<<fNumNodes<<endl;

    for (k=0;k<fNumNodes;k++)
    {
        str=Form("%f",GetBestSplit(k));

        out.width(5);  out << k;
        out.width(5);  out << GetNodeStatus(k);
        out.width(5);  out << GetTreeMap1(k);
        out.width(5);  out << GetTreeMap2(k);
        out.width(5);  out << GetBestVar(k);
        out.width(15); out << str<<endl;
        out.width(5);  out << GetNodeClass(k);
    }
    out<<endl;

    return k==fNumNodes;
}
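
// --------------------------------------------------------------------------
// Editor's note (illustration only): AsciiWrite() dumps one header line with
// the number of nodes, followed by one row per node containing the node
// index, status, left and right child indices, split variable, split value
// and node class. A hypothetical call writing a tree to standard output:
//
//     tree.AsciiWrite(cout);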