source: trunk/MagicSoft/Mars/mranforest/MRanTree.cc @ 7693

Last change on this file since 7693 was 7693, checked in by tbretz, 18 years ago
*** empty log message ***
File size: 20.5 KB
/* ======================================================================== *\
!
! *
! * This file is part of MARS, the MAGIC Analysis and Reconstruction
! * Software. It is distributed to you in the hope that it can be a useful
! * and timesaving tool in analysing Data of imaging Cerenkov telescopes.
! * It is distributed WITHOUT ANY WARRANTY.
! *
! * Permission to use, copy, modify and distribute this software and its
! * documentation for any purpose is hereby granted without fee,
! * provided that the above copyright notice appear in all copies and
! * that both that copyright notice and this permission notice appear
! * in supporting documentation. It is provided "as is" without express
! * or implied warranty.
! *
!
!
!   Author(s): Thomas Hengstebeck 3/2003 <mailto:hengsteb@physik.hu-berlin.de>
!
!   Copyright: MAGIC Software Development, 2000-2005
!
!
\* ======================================================================== */

/////////////////////////////////////////////////////////////////////////////
//
// MRanTree
//
// ParameterContainer for Tree structure
//
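// The tree is stored in flat arrays: for a node k, fTreeMap1[k] and
// fTreeMap2[k] hold the indices of its left and right child, fBestVar[k]
// is the index of the variable the node splits on (coded negative for
// terminal nodes) and fBestSplit[k] is the cut value (for a terminal node
// it holds the node response, e.g. the hadronness).
//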
/////////////////////////////////////////////////////////////////////////////
#include "MRanTree.h"

#include <iostream>

#include <float.h>   // FLT_MAX, used in FindBestSplitGini()

#include <TVector.h>
#include <TMatrix.h>
#include <TRandom.h>

#include "MArrayI.h"
#include "MArrayF.h"

#include "MLog.h"
#include "MLogManip.h"

ClassImp(MRanTree);

using namespace std;

// --------------------------------------------------------------------------
// Default constructor.
//
MRanTree::MRanTree(const char *name, const char *title)
    : fClassify(kTRUE), fNdSize(0), fNumTry(3)
{
    fName  = name  ? name  : "MRanTree";
    fTitle = title ? title : "Storage container for structure of a single tree";
}

// --------------------------------------------------------------------------
// Copy constructor
//
MRanTree::MRanTree(const MRanTree &tree)
{
    fName  = tree.fName;
    fTitle = tree.fTitle;

    fClassify = tree.fClassify;
    fNdSize   = tree.fNdSize;
    fNumTry   = tree.fNumTry;

    fNumNodes    = tree.fNumNodes;
    fNumEndNodes = tree.fNumEndNodes;

    fBestVar   = tree.fBestVar;
    fTreeMap1  = tree.fTreeMap1;
    fTreeMap2  = tree.fTreeMap2;
    fBestSplit = tree.fBestSplit;
    fGiniDec   = tree.fGiniDec;
}

void MRanTree::SetNdSize(Int_t n)
{
    // threshold node size of the terminal nodes, i.e. the training data is
    // split until the subsets (=terminal nodes) contain only pure data or
    // the subset size is <= n

    fNdSize=TMath::Max(1,n); // at least 1 event per node
}

void MRanTree::SetNumTry(Int_t n)
{
    // number of trials in the random split selection:
    // choose at least 1 variable to split in

    fNumTry=TMath::Max(1,n);
}
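
// Illustrative configuration sketch: the two parameters above are typically
// set before the forest-growing code (which prepares the data arrays passed
// to GrowTree) grows the individual trees, e.g.
//
//     MRanTree tree;
//     tree.SetNdSize(5);  // stop splitting nodes with 5 or fewer events
//     tree.SetNumTry(3);  // try 3 randomly chosen variables per split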

void MRanTree::GrowTree(TMatrix *mat, const MArrayF &hadtrue, const MArrayI &idclass,
                        MArrayI &datasort, const MArrayI &datarang, MArrayF &tclasspop,
                        const Float_t &mean, const Float_t &square, const MArrayI &jinbag,
                        const MArrayF &winbag, const int nclass)
{
    // The arrays have to be initialized with a generous size, so the total
    // number of nodes (nrnodes) is estimated for the worst case
    const Int_t numdim =mat->GetNcols();
    const Int_t numdata=winbag.GetSize();
    const Int_t nrnodes=2*numdata+1;
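    // (a binary tree in which every split produces two children and every
    //  terminal node contains at least one event has at most 2*numdata-1
    //  nodes, so 2*numdata+1 is a safe upper bound)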

    // number of events in bootstrap sample
    Int_t ninbag=0;
    for (Int_t n=0;n<numdata;n++) if(jinbag[n]==1) ninbag++;

    MArrayI bestsplit(nrnodes);
    MArrayI bestsplitnext(nrnodes);

    fBestVar.Set(nrnodes);   fBestVar.Reset();
    fTreeMap1.Set(nrnodes);  fTreeMap1.Reset();
    fTreeMap2.Set(nrnodes);  fTreeMap2.Reset();
    fBestSplit.Set(nrnodes); fBestSplit.Reset();
    fGiniDec.Set(numdim);    fGiniDec.Reset();

    if(fClassify)
        FindBestSplit=&MRanTree::FindBestSplitGini;
    else
        FindBestSplit=&MRanTree::FindBestSplitSigma;

    // tree growing
    BuildTree(datasort,datarang,hadtrue,idclass,bestsplit, bestsplitnext,
              tclasspop,mean,square,winbag,ninbag,nclass);

    // post processing, determine cut (or split) values fBestSplit
    for(Int_t k=0; k<nrnodes; k++)
    {
        if (GetNodeStatus(k)==-1)
            continue;

        const Int_t &bsp =bestsplit[k];
        const Int_t &bspn=bestsplitnext[k];
        const Int_t &msp =fBestVar[k];

        fBestSplit[k] = ((*mat)(bsp, msp)+(*mat)(bspn,msp))/2;
    }

    // resizing arrays to save memory
    fBestVar.Set(fNumNodes);
    fTreeMap1.Set(fNumNodes);
    fTreeMap2.Set(fNumNodes);
    fBestSplit.Set(fNumNodes);
}

int MRanTree::FindBestSplitGini(const MArrayI &datasort,const MArrayI &datarang,
                                const MArrayF &hadtrue,const MArrayI &idclass,
                                Int_t ndstart,Int_t ndend, const MArrayF &tclasspop,
                                const Float_t &mean, const Float_t &square, Int_t &msplit,
                                Float_t &decsplit,Int_t &nbest, const MArrayF &winbag,
                                const int nclass)
{
    const Int_t nrnodes = fBestSplit.GetSize();
    const Int_t numdata = (nrnodes-1)/2;
    const Int_t mdim    = fGiniDec.GetSize();

    // For the best split, msplit is the index of the variable (e.g. Hillas
    // parameter, zenith angle, ...) that is split on. decsplit is the
    // decrease in impurity measured by the Gini-index. nbest is the position
    // (in the sorted order of msplit) of the value split on; the cut is
    // later placed between this value and the next larger one.

    Int_t nbestvar=0;

    // compute initial values of numerator and denominator of the Gini-index,
    // Gini-index = pno/pdo
    Double_t pno=0;
    Double_t pdo=0;

    for (Int_t j=0; j<nclass; j++)
    {
        pno+=tclasspop[j]*tclasspop[j];
        pdo+=tclasspop[j];
    }

    const Double_t crit0=pno/pdo;
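
    // Example: for two classes with weighted populations tclasspop = {6, 4}
    // this gives pno = 6*6 + 4*4 = 52 and pdo = 10, hence crit0 = 5.2.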
185
186 // start main loop through variables to find best split,
187 // (Gini-index as criterium crit)
188
189 Double_t critmax=-FLT_MAX;
190
191 // random split selection, number of trials = fNumTry
192 for (Int_t mt=0; mt<fNumTry; mt++)
193 {
194 const Int_t mvar=Int_t(gRandom->Rndm()*mdim);
195 const Int_t mn = mvar*numdata;
196
197 // Gini index = rrn/rrd+rln/rld
198 Double_t rrn=pno;
199 Double_t rrd=pdo;
200 Double_t rln=0;
201 Double_t rld=0;
202
203 MArrayF wl(nclass); // left node //nclass
204 MArrayF wr(tclasspop); // right node//nclass
205
206 Double_t critvar=-1.0e20;
207 for(Int_t nsp=ndstart;nsp<=ndend-1;nsp++)
208 {
209 const Int_t &nc = datasort[mn+nsp];
210 const Int_t &k = idclass[nc];
211 const Float_t &u = winbag[nc];
212
213 // do classification, Gini index as split rule
214 rln+=u*(2*wl[k]+u);
215 rrn+=u*(-2*wr[k]+u);
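            // (the two updates above keep rln = sum_k wl[k]^2 and
            //  rrn = sum_k wr[k]^2 up to date, using
            //  (w+u)^2 = w^2 + u*(2*w+u) and (w-u)^2 = w^2 + u*(-2*w+u))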

            rld+=u;
            rrd-=u;

            wl[k]+=u;
            wr[k]-=u;

            if (datarang[mn+nc]>=datarang[mn+datasort[mn+nsp+1]])
                continue;

            if (TMath::Min(rrd,rld)<=1.0e-5)
                continue;

            const Double_t crit=(rln/rld)+(rrn/rrd);

            if (crit<=critvar) continue;

            nbestvar=nsp;
            critvar=crit;
        }

        if (critvar<=critmax) continue;

        msplit=mvar;
        nbest=nbestvar;
        critmax=critvar;
    }

    decsplit=critmax-crit0;

    return critmax<-1.0e10 ? 1 : 0;
}

int MRanTree::FindBestSplitSigma(const MArrayI &datasort,const MArrayI &datarang,
                                 const MArrayF &hadtrue, const MArrayI &idclass,
                                 Int_t ndstart,Int_t ndend, const MArrayF &tclasspop,
                                 const Float_t &mean, const Float_t &square, Int_t &msplit,
                                 Float_t &decsplit,Int_t &nbest, const MArrayF &winbag,
                                 const int nclass)
{
    const Int_t nrnodes = fBestSplit.GetSize();
    const Int_t numdata = (nrnodes-1)/2;
    const Int_t mdim    = fGiniDec.GetSize();

    // For the best split, msplit is the index of the variable (e.g. Hillas
    // parameter, zenith angle, ...) that is split on. decsplit is the
    // decrease of the (variance-based) split criterion. nbest is the
    // position (in the sorted order of msplit) of the value split on; the
    // cut is later placed between this value and the next larger one.

    Int_t nbestvar=0;

    // compute initial values of numerator and denominator of the split-index

    // resolution
    //Double_t pno=-(tclasspop[0]*square-mean*mean)*tclasspop[0];
    //Double_t pdo= (tclasspop[0]-1.)*mean*mean;

    // n*resolution
    //Double_t pno=-(tclasspop[0]*square-mean*mean)*tclasspop[0];
    //Double_t pdo= mean*mean;

    // variance
    //Double_t pno=-(square-mean*mean/tclasspop[0]);
    //Double_t pdo= (tclasspop[0]-1.);

    // n*variance
    Double_t pno= (square-mean*mean/tclasspop[0]);
    Double_t pdo= 1.;

    // 1./(n*variance)
    //Double_t pno= 1.;
    //Double_t pdo= (square-mean*mean/tclasspop[0]);

    const Double_t crit0=pno/pdo;
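
    // (with the "n*variance" choice above, and tclasspop[0] = sum of event
    //  weights, mean = weighted sum of the target values and square =
    //  weighted sum of their squares, crit0 is the weighted sum of squared
    //  deviations of the target from its weighted mean in this node)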

    // start main loop through variables to find the best split

    Double_t critmin=1.0e40;

    // random split selection, number of trials = fNumTry
    for (Int_t mt=0; mt<fNumTry; mt++)
    {
        const Int_t mvar= gRandom->Integer(mdim);
        const Int_t mn  = mvar*numdata;

        Double_t rrn=0, rrd=0, rln=0, rld=0;

        Double_t esumr =mean;
        Double_t e2sumr=square;
        Double_t esuml =0;
        Double_t e2suml=0;

        float wl=0.;           // left node
        float wr=tclasspop[0]; // right node

        Double_t critvar=critmin;
        for(Int_t nsp=ndstart;nsp<=ndend-1;nsp++)
        {
            const Int_t   &nc=datasort[mn+nsp];
            const Float_t &f =hadtrue[nc];
            const Float_t &u =winbag[nc];

            e2sumr-=u*f*f;
            esumr -=u*f;
            wr    -=u;

            //-------------------------------------------
            // resolution
            //rrn=(wr*e2sumr-esumr*esumr)*wr;
            //rrd=(wr-1.)*esumr*esumr;

            // resolution times n
            //rrn=(wr*e2sumr-esumr*esumr)*wr;
            //rrd=esumr*esumr;

            // sigma
            //rrn=(e2sumr-esumr*esumr/wr);
            //rrd=(wr-1.);

            // sigma times n
            rrn=(e2sumr-esumr*esumr/wr);
            rrd=1.;

            // 1./(n*variance)
            //rrn=1.;
            //rrd=(e2sumr-esumr*esumr/wr);
            //-------------------------------------------

            e2suml+=u*f*f;
            esuml +=u*f;
            wl    +=u;

            //-------------------------------------------
            // resolution
            //rln=(wl*e2suml-esuml*esuml)*wl;
            //rld=(wl-1.)*esuml*esuml;

            // resolution times n
            //rln=(wl*e2suml-esuml*esuml)*wl;
            //rld=esuml*esuml;

            // sigma
            //rln=(e2suml-esuml*esuml/wl);
            //rld=(wl-1.);

            // sigma times n
            rln=(e2suml-esuml*esuml/wl);
            rld=1.;

            // 1./(n*variance)
            //rln=1.;
            //rld=(e2suml-esuml*esuml/wl);
            //-------------------------------------------

            if (datarang[mn+nc]>=datarang[mn+datasort[mn+nsp+1]])
                continue;

            if (TMath::Min(rrd,rld)<=1.0e-5)
                continue;

            const Double_t crit=(rln/rld)+(rrn/rrd);

            if (crit>=critvar) continue;

            nbestvar=nsp;
            critvar=crit;
        }

        if (critvar>=critmin) continue;

        msplit=mvar;
        nbest=nbestvar;
        critmin=critvar;
    }

    decsplit=crit0-critmin;

    //return critmin>1.0e20 ? 1 : 0;
    return decsplit<0 ? 1 : 0;
}

void MRanTree::MoveData(MArrayI &datasort,Int_t ndstart, Int_t ndend,
                        MArrayI &idmove,MArrayI &ncase,Int_t msplit,
                        Int_t nbest,Int_t &ndendl)
{
    // This is the heart of the BuildTree construction. Based on the best
    // split, the data in the part of datasort corresponding to the current
    // node is moved to the left if it belongs to the left child-node and to
    // the right if it belongs to the right child-node.
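    //
    // Example: if for the split variable the node's range of datasort holds
    // the case numbers {7, 2, 9, 4} and nbest points at the second of these
    // positions, then cases 7 and 2 are flagged to go left (idmove=1) and
    // 9 and 4 to go right; afterwards, for every variable, the cases of the
    // left child are packed in front of those of the right child, each group
    // keeping its original sorting order.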
    const Int_t numdata = ncase.GetSize();
    const Int_t mdim    = fGiniDec.GetSize();

    MArrayI tdatasort(numdata);

    // compute idmove = indicator of case nos. going left
    for (Int_t nsp=ndstart;nsp<=ndend;nsp++)
    {
        const Int_t &nc=datasort[msplit*numdata+nsp];
        idmove[nc]= nsp<=nbest?1:0;
    }
    ndendl=nbest;

    // shift case nos. right and left for numerical variables
    for(Int_t msh=0;msh<mdim;msh++)
    {
        Int_t k=ndstart-1;
        for (Int_t n=ndstart;n<=ndend;n++)
        {
            const Int_t &ih=datasort[msh*numdata+n];
            if (idmove[ih]==1)
                tdatasort[++k]=datasort[msh*numdata+n];
        }

        for (Int_t n=ndstart;n<=ndend;n++)
        {
            const Int_t &ih=datasort[msh*numdata+n];
            if (idmove[ih]==0)
                tdatasort[++k]=datasort[msh*numdata+n];
        }

        for(Int_t m=ndstart;m<=ndend;m++)
            datasort[msh*numdata+m]=tdatasort[m];
    }

    // compute case nos. for right and left nodes
    for(Int_t n=ndstart;n<=ndend;n++)
        ncase[n]=datasort[msplit*numdata+n];
}

void MRanTree::BuildTree(MArrayI &datasort,const MArrayI &datarang, const MArrayF &hadtrue,
                         const MArrayI &idclass, MArrayI &bestsplit, MArrayI &bestsplitnext,
                         MArrayF &tclasspop, const Float_t &tmean, const Float_t &tsquare,
                         const MArrayF &winbag, Int_t ninbag, const int nclass)
{
    // BuildTree consists of repeated calls to two functions, FindBestSplit
    // and MoveData. FindBestSplit does just that: it finds the best split of
    // the current node. MoveData moves the data in the split node right and
    // left so that the data corresponding to each child node is contiguous.
    //
    // BuildTree bookkeeping:
    // ncur is the total number of nodes to date. nodestatus[k]=1 if the k-th
    // node has been split, nodestatus[k]=2 if the node exists but has not
    // yet been split, and =-1 if the node is terminal. A node is terminal if
    // its size is below a threshold value, or if it contains only one class,
    // or if all the data-values are equal. If the current node k is split,
    // its children are numbered ncur+1 (left) and ncur+2 (right), ncur
    // increases to ncur+2 and the next node to be split is numbered k+1.
    // When no more nodes can be split, BuildTree returns.
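    //
    // Example of the numbering: the root node k=0 is split first, its
    // children become nodes 1 (left) and 2 (right) and ncur becomes 2; when
    // node k=1 is split next, its children become nodes 3 and 4, and so on.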
    const Int_t mdim    = fGiniDec.GetSize();
    const Int_t nrnodes = fBestSplit.GetSize();
    const Int_t numdata = (nrnodes-1)/2;

    MArrayI nodepop(nrnodes);
    MArrayI nodestart(nrnodes);
    MArrayI parent(nrnodes);

    MArrayI ncase(numdata);
    MArrayI idmove(numdata);
    MArrayI iv(mdim);

    MArrayF classpop(nrnodes*nclass);
    MArrayI nodestatus(nrnodes);

    for (Int_t j=0;j<nclass;j++)
        classpop[j*nrnodes+0]=tclasspop[j];

    MArrayF mean(nrnodes);
    MArrayF square(nrnodes);

    mean[0]  =tmean;
    square[0]=tsquare;

    Int_t ncur=0;
    nodepop[0]   =ninbag;
    nodestatus[0]=2;

    // start main loop
    for (Int_t kbuild=0; kbuild<nrnodes; kbuild++)
    {
        if (kbuild>ncur) break;
        if (nodestatus[kbuild]!=2) continue;

        // initialize for next call to FindBestSplit
        const Int_t ndstart=nodestart[kbuild];
        const Int_t ndend  =ndstart+nodepop[kbuild]-1;

        for (Int_t j=0;j<nclass;j++)
            tclasspop[j]=classpop[j*nrnodes+kbuild];

        Int_t msplit, nbest;
        Float_t decsplit=0;

        if ((this->*FindBestSplit)(datasort,datarang,hadtrue,idclass,ndstart,
                                   ndend, tclasspop,mean[kbuild],square[kbuild],msplit,decsplit,
                                   nbest,winbag,nclass))
        {
            nodestatus[kbuild]=-1;
            continue;
        }

        fBestVar[kbuild]=msplit;
        fGiniDec[msplit]+=decsplit;

        bestsplit[kbuild]    =datasort[msplit*numdata+nbest];
        bestsplitnext[kbuild]=datasort[msplit*numdata+nbest+1];

        Int_t ndendl;
        MoveData(datasort,ndstart,ndend,idmove,ncase,
                 msplit,nbest,ndendl);

        // left node no. = ncur+1, right node no. = ncur+2
        nodepop[ncur+1]  =ndendl-ndstart+1;
        nodepop[ncur+2]  =ndend-ndendl;
        nodestart[ncur+1]=ndstart;
        nodestart[ncur+2]=ndendl+1;

        // find class populations in both nodes
        for (Int_t n=ndstart;n<=ndendl;n++)
        {
            const Int_t &nc=ncase[n];
            const int    j =idclass[nc];

            mean[ncur+1]  +=hadtrue[nc]*winbag[nc];
            square[ncur+1]+=hadtrue[nc]*hadtrue[nc]*winbag[nc];

            classpop[j*nrnodes+ncur+1]+=winbag[nc];
        }

        for (Int_t n=ndendl+1;n<=ndend;n++)
        {
            const Int_t &nc=ncase[n];
            const int    j =idclass[nc];

            mean[ncur+2]  +=hadtrue[nc]*winbag[nc];
            square[ncur+2]+=hadtrue[nc]*hadtrue[nc]*winbag[nc];

            classpop[j*nrnodes+ncur+2]+=winbag[nc];
        }

        // check on nodestatus
        nodestatus[ncur+1]=2;
        nodestatus[ncur+2]=2;
        if (nodepop[ncur+1]<=fNdSize) nodestatus[ncur+1]=-1;
        if (nodepop[ncur+2]<=fNdSize) nodestatus[ncur+2]=-1;

        Double_t popt1=0;
        Double_t popt2=0;
        for (Int_t j=0;j<nclass;j++)
        {
            popt1+=classpop[j*nrnodes+ncur+1];
            popt2+=classpop[j*nrnodes+ncur+2];
        }

        if(fClassify)
        {
            // check if only members of one class are in the node
            for (Int_t j=0;j<nclass;j++)
            {
                if (classpop[j*nrnodes+ncur+1]==popt1) nodestatus[ncur+1]=-1;
                if (classpop[j*nrnodes+ncur+2]==popt2) nodestatus[ncur+2]=-1;
            }
        }

        fTreeMap1[kbuild]=ncur+1;
        fTreeMap2[kbuild]=ncur+2;
        parent[ncur+1]=kbuild;
        parent[ncur+2]=kbuild;
        nodestatus[kbuild]=1;
        ncur+=2;
        if (ncur>=nrnodes) break;
    }

    // determine number of nodes
    fNumNodes=nrnodes;
    for (Int_t k=nrnodes-1;k>=0;k--)
    {
        if (nodestatus[k]==0) fNumNodes-=1;
        if (nodestatus[k]==2) nodestatus[k]=-1;
    }

    fNumEndNodes=0;
    for (Int_t kn=0;kn<fNumNodes;kn++)
        if(nodestatus[kn]==-1)
        {
            fNumEndNodes++;

            Double_t pp=0;
            for (Int_t j=0;j<nclass;j++)
            {
                if(classpop[j*nrnodes+kn]>pp)
                {
                    // class + status of node kn coded into fBestVar[kn]
                    fBestVar[kn]=j-nclass;
                    pp=classpop[j*nrnodes+kn];
                }
            }
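            // With nclass=2 this codes class 0 as fBestVar=-2 and class 1 as
            // fBestVar=-1, while internal nodes keep fBestVar>=0 (the index
            // of the split variable); TreeHad() only tests the sign.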

            float sum=0;
            for(int i=0;i<nclass;i++) sum+=classpop[i*nrnodes+kn];

            fBestSplit[kn]=mean[kn]/sum;
        }
}

Double_t MRanTree::TreeHad(const TVector &event)
{
    Int_t kt=0;

    // to optimize on storage space node status and node class
    // are coded into fBestVar:
    // status of node kt = TMath::Sign(1,fBestVar[kt])
    // class of node kt  = fBestVar[kt]+2 (class defined by larger
    //                     node population, actually not used)
    // hadronness assigned to node kt = fBestSplit[kt]

    for (Int_t k=0;k<fNumNodes;k++)
    {
        if (fBestVar[kt]<0)
            break;

        const Int_t m=fBestVar[kt];
        kt = event(m)<=fBestSplit[kt] ? fTreeMap1[kt] : fTreeMap2[kt];
    }

    return fBestSplit[kt];
}

Double_t MRanTree::TreeHad(const TMatrixRow &event)
{
    Int_t kt=0;

    // to optimize on storage space node status and node class
    // are coded into fBestVar:
    // status of node kt = TMath::Sign(1,fBestVar[kt])
    // class of node kt  = fBestVar[kt]+2 (class defined by larger
    //                     node population, actually not used)
    // hadronness assigned to node kt = fBestSplit[kt]

    for (Int_t k=0;k<fNumNodes;k++)
    {
        if (fBestVar[kt]<0)
            break;

        const Int_t m=fBestVar[kt];
        kt = event(m)<=fBestSplit[kt] ? fTreeMap1[kt] : fTreeMap2[kt];
    }

    return fBestSplit[kt];
}

Double_t MRanTree::TreeHad(const TMatrix &m, Int_t ievt)
{
#if ROOT_VERSION_CODE < ROOT_VERSION(4,00,8)
    return TreeHad(TMatrixRow(m, ievt));
#else
    return TreeHad(TMatrixFRow_const(m, ievt));
#endif
}
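
// Illustrative usage sketch: the event vector passed to TreeHad() must
// contain the same variables, in the same order, that were used when the
// tree was grown, e.g.
//
//     TVector evt(ndim);                      // ndim = number of variables
//     // ... fill evt with the event's parameters ...
//     const Double_t had = tree.TreeHad(evt); // response of this single tree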

Bool_t MRanTree::AsciiWrite(ostream &out) const
{
    TString str;
    Int_t k;

    out.width(5); out<<fNumNodes<<endl;

    for (k=0;k<fNumNodes;k++)
    {
        str=Form("%f",GetBestSplit(k));

        out.width(5);  out << k;
        out.width(5);  out << GetNodeStatus(k);
        out.width(5);  out << GetTreeMap1(k);
        out.width(5);  out << GetTreeMap2(k);
        out.width(5);  out << GetBestVar(k);
        out.width(15); out << str<<endl;
        out.width(5);  out << GetNodeClass(k);
    }
    out<<endl;

    return k==fNumNodes;
}