Good weather Crab data
Suppose you want to create a file with good-weather Crab data from the 2017/18 season, taken at zenith distances smaller than 35°. The file should contain most of the image parameters with no cuts applied, so that you can optimize the cuts yourself. Here is the query:
SELECT
  Images.MeanX, Images.MeanY, Images.Width, Images.Length,
  Images.CosDelta, Images.SinDelta, Images.M3Long, Images.SlopeLong,
  Images.Leakage1, Images.NumIslands, Images.NumUsedPixels, Images.Size,
  Position.X, Position.Y
FROM Images
LEFT JOIN Position USING (FileId, EvtNumber)
LEFT JOIN RunInfo  USING (FileId)
WHERE
  fSourceKey=5
  AND fRunTypeKey=1
  AND FileId BETWEEN 170600000 AND 180500000
  AND fZenithDistanceMax<35
  AND fR750Cor>0.9*fR750Ref
You can run it, for example, with:
rootifysql --uri=fact:password@ihp-pc45.ethz.ch/factdata --out=crab2018.root
Optimize Area-cut
Assume you have written a large data set of good-weather, low zenith-distance Crab data to a file with the previous query. The following sections give a few examples of how you can make use of it in your analysis. To speed things up or to decrease the file size (715 MB), you might want to add additional cuts, depending on your analysis. If you want to apply a theta-square cut, please refer to DatabaseBasedAnalysis#Dataretrieval.
#include <iostream>
#include <iomanip>

#include <TMath.h>
#include <TH1.h>
#include <TH2.h>
#include <TProfile.h>
#include <TChain.h>
#include <TGraph.h>
#include <TCanvas.h>

Double_t LiMa(Double_t s, Double_t b, Double_t alpha=0.2)
{
    const Double_t sum = s+b;

    if (s<0 || b<0 || alpha<=0)
        return -1;

    const Double_t l = s==0 ? 0 : s*TMath::Log(s/sum*(alpha+1)/alpha);
    const Double_t m = b==0 ? 0 : b*TMath::Log(b/sum*(alpha+1));

    return l+m<0 ? 0 : TMath::Sqrt((l+m)*2);
}

void optimarea()
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("crab2018.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double X, Y, MeanX, MeanY, Width, Length, CosDelta, SinDelta, M3Long,
           SlopeLong, Leakage1, Size, ConcCore, ConcCOG, NumIslands,
           NumUsedPixels, Zd;

    // Connect the variables to the corresponding leaves
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    c.SetBranchAddress("Size", &Size);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;

    // -------------------- Source dependent parameter calculation -------------------

    // Create a histogram for on- and off-data
    TH1F hold("old", "", 100, 0, 1);
    TH1F hnew("onew", "", 100, 0, 1);

    TH2F h2area_on( "H_AvsS_on",  "", 24, 1.5, 4.5, 200, -2, 0);
    TH2F h2area_of( "H_AvsS_of",  "", 24, 1.5, 4.5, 200, -2, 0);
    TH2F h2area_sig("H_AvsS_sig", "", 24, 1.5, 4.5, 200, -2, 0);

    // Loop over all events
    for (int i=0; i<c.GetEntries(); i++)
    {
        // read the i-th event from the file
        c.GetEntry(i);

        // First calculate all cuts to speedup the analysis
        double area = TMath::Pi()*Width*Length;

        bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1<0.1;
        if (!cutq)
            continue;

        // Loop over all wobble positions in the camera
        for (int angle=0; angle<360; angle+=60)
        {
            // -------------------- Source dependent parameter calculation -------------------

            double cr = cos(angle*TMath::DegToRad());
            double sr = sin(angle*TMath::DegToRad());

            double px = cr*X-sr*Y;
            double py = cr*Y+sr*X;

            double dx = MeanX - px*1.02;
            double dy = MeanY - py*1.02;

            double norm = sqrt(dx*dx + dy*dy);
            double dist = norm*mm2deg;

            double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
            double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

            double alpha = asin(lx);
            double sgn   = TMath::Sign(1., ly);

            // ------------------------------- Application ----------------------------------

            double m3l   = M3Long*sgn*mm2deg;
            double slope = SlopeLong*sgn/mm2deg;

            // --------------------------------- Analysis -----------------------------------

            double xi = 1.340 + 0.0755*slope + 1.67972*(1-1/(1+4.86232*Leakage1));

            double sign1 = m3l+0.07;
            double sign2 = (dist-0.5)*7.2-slope;

            double disp    = (sign1<0 || sign2<0 ? -xi : xi)*(1-Width/Length);
            double thetasq = disp*disp + dist*dist - 2*disp*dist*sqrt(1-lx*lx);

            if (thetasq>0.024)
                continue;

            if (angle==0)
                h2area_on.Fill(log10(Size), log10(area*mm2deg*mm2deg));
            else
                h2area_of.Fill(log10(Size), log10(area*mm2deg*mm2deg));
        }
    }

    // Calculate the Li&Ma significance for every combination of Size bin and area cut
    for (int x=1; x<=h2area_sig.GetNbinsX(); x++)
        for (int y=1; y<=h2area_sig.GetNbinsY(); y++)
        {
            double on = h2area_on.Integral(x, x, 1, y);
            double of = h2area_of.Integral(x, x, 1, y);

            h2area_sig.SetBinContent(x, y, LiMa(on, of));
        }

    // For each Size bin, find the area cut with the highest significance
    TGraph gmax;
    for (int x=1; x<=h2area_sig.GetNbinsX(); x++)
    {
        TH1D *p = h2area_sig.ProjectionY("_py", x, x);
        double max = p->GetXaxis()->GetBinCenter(p->GetMaximumBin());
        delete p;

        gmax.SetPoint(gmax.GetN(), h2area_sig.GetXaxis()->GetBinCenter(x), max);
    }

    h2area_on.SetStats(kFALSE);
    h2area_of.SetStats(kFALSE);
    h2area_sig.SetStats(kFALSE);

    TCanvas *canv = new TCanvas;
    canv->Divide(2,2);

    canv->cd(1);
    h2area_on.DrawCopy("colz");
    gmax.DrawClone("*");

    canv->cd(2);
    h2area_sig.DrawCopy("colz");
    gmax.DrawClone("*");

    canv->cd(3);
    h2area_of.DrawCopy("colz");
    gmax.DrawClone("*");

    canv->cd(4);
}
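For reference, the source-dependent reconstruction evaluated by this macro corresponds to the following relations (this is only a restatement of the formulas in the code, with lx as computed there):

\vartheta^2 = \mathrm{Disp}^2 + \mathrm{Dist}^2 - 2\,\mathrm{Disp}\,\mathrm{Dist}\,\cos\alpha,
\qquad \cos\alpha = \sqrt{1-l_x^2},
\qquad \mathrm{Disp} = \pm\,\xi\,(1-\mathrm{Width}/\mathrm{Length})

where the sign of Disp is the head-tail decision taken from sign1 and sign2, and an event counts as on- (or off-) source if \vartheta^2 < 0.024\,\mathrm{deg}^2.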
Don't forget that this only gives you the best significance per bin in Size. To get the best detection significance, you will have to introduce an additional Size-cut.
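A minimal sketch of such a scan is shown below. It assumes the histograms h2area_on and h2area_of filled by the macro above are available and re-uses its LiMa function; the function name scan_size_cut and the choice to scan the lower Size cut together with the area cut are only an illustration, not part of the original macro.

#include <iostream>
#include <TH2.h>

// Scan a lower cut in log10(Size) (x-axis bins) together with an upper cut
// in log10(area) (y-axis bins) and report the most significant combination.
// h2area_on, h2area_of and LiMa() are taken from the macro above.
void scan_size_cut(TH2F &h2area_on, TH2F &h2area_of)
{
    double best_sig = 0, best_size = 0, best_area = 0;

    const int nx = h2area_on.GetNbinsX();
    const int ny = h2area_on.GetNbinsY();

    for (int x=1; x<=nx; x++)        // lower cut in log10(Size)
        for (int y=1; y<=ny; y++)    // upper cut in log10(area)
        {
            // Everything above the Size cut and below the area cut
            const double on = h2area_on.Integral(x, nx, 1, y);
            const double of = h2area_of.Integral(x, nx, 1, y);

            const double sig = LiMa(on, of);
            if (sig<=best_sig)
                continue;

            best_sig  = sig;
            best_size = h2area_on.GetXaxis()->GetBinLowEdge(x);
            best_area = h2area_on.GetYaxis()->GetBinUpEdge(y);
        }

    std::cout << "Best significance " << best_sig
              << " for log10(Size) > " << best_size
              << " and log10(area) < " << best_area << std::endl;
}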
Optimize Disp (Observable)
For the optimization you need a file with simulated (MC) events. A corresponding query (add your own event selection) could look like this:

SELECT
  MeanX, MeanY, Width, Length, CosDelta, SinDelta, M3Long, SlopeLong,
  Leakage1, NumIslands, NumUsedPixels, Size, X, Y
FROM EventsMC
LEFT JOIN PositionMC USING (FileId, EvtNumber)
WHERE
  /* ... do some data selection here ... */
The following ROOT macro (to be compiled!) compares the previous and the updated Disp parametrization and plots the residual of the parametrization against several observables:
#include <iostream>
#include <iomanip>

#include <TMath.h>
#include <TH1.h>
#include <TH2.h>
#include <TProfile.h>
#include <TChain.h>
#include <TGraph.h>
#include <TCanvas.h>

void optimdisp()
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("simulation.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double X, Y, MeanX, MeanY, Width, Length, CosDelta, SinDelta, M3Long,
           SlopeLong, Leakage1, Size, ConcCore, ConcCOG, NumIslands,
           NumUsedPixels, Zd;

    // Connect the variables to the corresponding leaves
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    c.SetBranchAddress("Size", &Size);
    c.SetBranchAddress("Zd", &Zd);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;

    // -------------------- Source dependent parameter calculation -------------------

    // Create a histogram for on- and off-data
    TH1F hold("old", "", 100, 0, 1);
    TH1F hnew("onew", "", 100, 0, 1);

    TH2F     h2slope("H_VsSlope",   "", 75,   -8,     7, 100, -2.5, 1.5);
    TProfile p2slope("P_VsSlope",   "", 75,   -8,     7);
    TH2F     h2leak( "H_VsLeakage", "", 75,    0,  0.15, 100, -2.5, 1.5);
    TH2F     h2m3l(  "H_M3long",    "", 75, -0.2,   0.6, 100, -2.5, 1.5);
    TH2F     h2zd(   "H_Zd",        "", 30,   30,    60, 100, -2.5, 1.5);
    TH2F     h2ni(   "H_NumIsl",    "", 10,  0.5,  10.5, 100, -2.5, 1.5);
    TH2F     h2np(   "H_NumPix",    "", 10,  0.5, 100.5, 100, -2.5, 1.5);
    TH2F     h2size( "H_Size",      "", 30,  1.5,   4.5, 100, -2.5, 1.5);

    // Loop over all events
    for (int i=0; i<c.GetEntries(); i++)
    {
        // read the i-th event from the file
        c.GetEntry(i);

        // First calculate all cuts to speedup the analysis
        double area = TMath::Pi()*Width*Length;

        bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1<0.1;
        if (!cutq)
            continue;

        bool cut0 = area < log10(Size)*898-1535;
        if (!cut0)
            continue;

        // -------------------- Source dependent parameter calculation -------------------

        int angle = 0;

        double cr = cos(angle*TMath::DegToRad());
        double sr = sin(angle*TMath::DegToRad());

        double px = cr*X-sr*Y;
        double py = cr*Y+sr*X;

        double dx = MeanX - px*1.02;
        double dy = MeanY - py*1.02;

        double norm = sqrt(dx*dx + dy*dy);
        double dist = norm*mm2deg;

        double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
        double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

        double alpha = asin(lx);
        double sgn   = TMath::Sign(1., ly);

        // ------------------------------- Application ----------------------------------

        double m3l   = M3Long*sgn*mm2deg;
        double slope = SlopeLong*sgn/mm2deg;

        // --------------------------------- Analysis -----------------------------------

        //double xi   = 1.34723 + 0.15214 *slope + 0.970704*(1-1/(1+8.89826*Leakage1));
        double xi_old = 1.39252 + 0.154247*slope + 1.67972 *(1-1/(1+4.86232*Leakage1));
        double xi_new = 1.340   + 0.0755  *slope + 1.67972 *(1-1/(1+4.86232*Leakage1));

        double sign1 = m3l+0.07;
        double sign2 = (dist-0.5)*7.2-slope;

        double disp_old = (sign1<0 || sign2<0 ? -xi_old : xi_old)*(1-Width/Length);
        double disp_new = (sign1<0 || sign2<0 ? -xi_new : xi_new)*(1-Width/Length);

        double thetasq_old = disp_old*disp_old + dist*dist - 2*disp_old*dist*sqrt(1-lx*lx);
        double thetasq_new = disp_new*disp_new + dist*dist - 2*disp_new*dist*sqrt(1-lx*lx);

        // Fill the on- and off-histograms
        hold.Fill(thetasq_old);
        hnew.Fill(thetasq_new);

        double residual = xi_new-dist/sqrt(1-lx*lx)/(1-Width/Length);

        h2slope.Fill(slope,         residual);
        p2slope.Fill(slope,         residual);
        h2leak.Fill( Leakage1,      residual);
        h2m3l.Fill(  m3l,           residual);
        h2zd.Fill(   Zd,            residual);
        h2ni.Fill(   NumIslands,    residual);
        h2np.Fill(   NumUsedPixels, residual);
        h2size.Fill( log10(Size),   residual);
    }

    TCanvas *canv = new TCanvas;
    canv->Divide(2,2);

    canv->cd(1);
    gPad->SetGridy();
    h2slope.DrawCopy("colz");
    p2slope.DrawCopy("same");

    canv->cd(2);
    gPad->SetGridy();
    h2leak.DrawCopy("colz");

    canv->cd(3);
    gPad->SetGridy();
    //h2m3l.DrawCopy("colz");
    h2zd.DrawCopy("colz");
    //h2ni.DrawCopy("colz");
    //h2np.DrawCopy("colz");
    //h2size.DrawCopy("colz");

    canv->cd(4);
    // Plot the result
    hold.SetLineColor(kRed);
    hnew.SetMinimum(0);
    hnew.DrawCopy();
    hold.DrawCopy("same");
}
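Written out, the updated parametrization tested by this macro is (constants copied from the code)

\xi_\mathrm{new} = 1.340 + 0.0755\,\mathrm{Slope} + 1.67972\,\Bigl(1 - \frac{1}{1 + 4.86232\,\mathrm{Leakage1}}\Bigr)

and the quantity plotted against the various observables is the residual

\mathrm{residual} = \xi_\mathrm{new} - \frac{\mathrm{Dist}}{\sqrt{1-l_x^2}\,\bigl(1-\mathrm{Width}/\mathrm{Length}\bigr)}

A residual that is flat in an observable and centered around zero indicates that the parametrization has no bias with respect to that observable.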
Optimize Disp (Leakage1==0)
#include <iostream>
#include <iomanip>

#include <TMath.h>
#include <TH2.h>
#include <TChain.h>
#include <TCanvas.h>

void optimdisp2()
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("simulation.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double FileId, EvtNumber, X, Y, MeanX, MeanY, Width, Length, CosDelta,
           SinDelta, M3Long, SlopeLong, Leakage1, SlopeSpreadWeighted, Size,
           ConcCore, ConcCOG, NumIslands, NumUsedPixels, Zd;

    // Connect the variables to the corresponding leaves
    c.SetBranchAddress("FileId", &FileId);
    c.SetBranchAddress("EvtNumber", &EvtNumber);
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    c.SetBranchAddress("Size", &Size);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;

    // -------------------- Source dependent parameter calculation -------------------

    TH2F hist("", "", 26, 1.300-0.0025, 1.430-0.0025, 20, 0.040-0.0025, 0.140-0.0025);

    // Scan the grid of the two parameters Xi0 and Slope0
    for (float xi0=1.30; xi0<1.43; xi0+=0.005)
        for (float slope0=0.04; slope0<0.14; slope0+=0.005)
        {
            int cnt_on = 0;

            for (int i=0; i<c.GetEntries(); i++)
            {
                // read the i-th event from the file
                c.GetEntry(i);

                // First calculate all cuts to speedup the analysis
                double area = TMath::Pi()*Width*Length;

                // The aberration correction does increase also Width and Length by 1.02
                bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1==0;
                if (!cutq)
                    continue;

                bool cut0 = area < log10(Size)*898-1535;
                if (!cut0)
                    continue;

                int angle = 0;

                // -------------------- Source dependent parameter calculation -------------------

                double cr = cos(angle*TMath::DegToRad());
                double sr = sin(angle*TMath::DegToRad());

                double px = cr*X-sr*Y;
                double py = cr*Y+sr*X;

                double dx = MeanX - px*1.02;
                double dy = MeanY - py*1.02;

                double norm = sqrt(dx*dx + dy*dy);
                double dist = norm*mm2deg;

                double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
                double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

                double alpha = asin(lx);
                double sgn   = TMath::Sign(1., ly);

                // ------------------------------- Application ----------------------------------

                double m3l   = M3Long*sgn*mm2deg;
                double slope = SlopeLong*sgn/mm2deg;

                // --------------------------------- Analysis -----------------------------------

                double xi = xi0 + slope0*slope;

                double sign1 = m3l+0.07;
                double sign2 = (dist-0.5)*7.2-slope;

                double disp    = (sign1<0 || sign2<0 ? -xi : xi)*(1-Width/Length);
                double thetasq = disp*disp + dist*dist - 2*disp*dist*sqrt(1-lx*lx);

                if (thetasq<0.024)
                    cnt_on++;
            }

            hist.Fill(xi0, slope0, cnt_on);

            cout << xi0 << " " << slope0 << " " << cnt_on << endl;
        }

    TCanvas *canv = new TCanvas;
    canv->SetTopMargin(0.01);

    hist.SetStats(kFALSE);
    hist.SetContour(100);
    hist.SetXTitle("Xi_{0} / deg");
    hist.SetYTitle("Slope_{0} #dot ns/deg^{2} ");
    hist.GetYaxis()->SetTitleOffset(1.2);
    hist.DrawCopy("colz cont2");
}
This scan results in the plot shown in the attachments.
Optimize Disp (Leakage1>0)
#include <iostream>
#include <iomanip>

#include <TMath.h>
#include <TH2.h>
#include <TChain.h>
#include <TCanvas.h>

void optimdisp3()
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("simulation.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double FileId, EvtNumber, X, Y, MeanX, MeanY, Width, Length, CosDelta,
           SinDelta, M3Long, SlopeLong, Leakage1, SlopeSpreadWeighted, Size,
           ConcCore, ConcCOG, NumIslands, NumUsedPixels, Zd;

    // Connect the variables to the corresponding leaves
    c.SetBranchAddress("FileId", &FileId);
    c.SetBranchAddress("EvtNumber", &EvtNumber);
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    //c.SetBranchAddress("SlopeSpreadWeighted", &SlopeSpreadWeighted);
    c.SetBranchAddress("Size", &Size);
    c.SetBranchAddress("Zd", &Zd);
    //c.SetBranchAddress("ConcCOG", &ConcCOG);
    //c.SetBranchAddress("ConcCore", &ConcCore);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;
    //double aberration = 1.02;

    // -------------------- Source dependent parameter calculation -------------------

    TH2F hist("", "", 32, 0.800-0.0250, 2.400-0.0250, 60, 3.000-0.0250, 6.000-0.0250);

    // Scan the grid of the two leakage parameters L0 and L1
    for (float l0=0.8; l0<2.4; l0+=0.05)
        for (float l1=3; l1<6; l1+=0.05)
        {
            int cnt_on = 0;

            for (int i=0; i<c.GetEntries(); i++)
            {
                // read the i-th event from the file
                c.GetEntry(i);

                // First calculate all cuts to speedup the analysis
                double area = TMath::Pi()*Width*Length;

                // The aberration correction does increase also Width and Length by 1.02
                bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1>0;
                if (!cutq)
                    continue;

                bool cut0 = area < log10(Size)*898-1535;
                if (!cut0)
                    continue;

                int angle = 0;

                // -------------------- Source dependent parameter calculation -------------------

                double cr = cos(angle*TMath::DegToRad());
                double sr = sin(angle*TMath::DegToRad());

                double px = cr*X-sr*Y;
                double py = cr*Y+sr*X;

                double dx = MeanX - px*1.02;
                double dy = MeanY - py*1.02;

                double norm = sqrt(dx*dx + dy*dy);
                double dist = norm*mm2deg;

                double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
                double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

                double alpha = asin(lx);
                double sgn   = TMath::Sign(1., ly);

                // ------------------------------- Application ----------------------------------

                double m3l   = M3Long*sgn*mm2deg;
                double slope = SlopeLong*sgn/mm2deg;

                // --------------------------------- Analysis -----------------------------------

                double xi = 1.36 + 0.085*slope + l0*(1-1/(1+l1*Leakage1));   // Optim: 1.36 + 0.085

                double sign1 = m3l+0.07;
                double sign2 = (dist-0.5)*7.2-slope;

                double disp    = (sign1<0 || sign2<0 ? -xi : xi)*(1-Width/Length);
                double thetasq = disp*disp + dist*dist - 2*disp*dist*sqrt(1-lx*lx);

                if (thetasq<0.024)
                    cnt_on++;
            }

            hist.Fill(l0, l1, cnt_on);

            cout << l0 << " " << l1 << " " << cnt_on << endl;
        }

    TCanvas *canv = new TCanvas;
    canv->SetTopMargin(0.01);

    hist.SetStats(kFALSE);
    hist.SetContour(100);
    hist.SetXTitle("L_{0} / deg");
    hist.SetYTitle("L_{1} / deg");
    hist.GetYaxis()->SetTitleOffset(1.2);
    hist.DrawCopy("colz cont2");
}
This scan, too, results in a plot shown in the attachments.
Optimize Disp (on Crab Data)
This is the same optimization, but run on Crab data and using the excess error as figure of merit.
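The figure of merit returned by the Error function in the macro below is the excess divided by its error, with the off-counts scaled by the off-region weight of the five wobble off-positions:

S = \frac{N_\mathrm{on} - \alpha\,N_\mathrm{off}}{\sqrt{N_\mathrm{on} + \alpha^2\,N_\mathrm{off}}}, \qquad \alpha = 0.2

This quantity is then scanned over a grid of the two Disp parameters \xi_0 and \mathrm{Slope}_0.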
#include <iostream>
#include <iomanip>

#include <TMath.h>
#include <TH2.h>
#include <TChain.h>
#include <TCanvas.h>
#include <TGraph.h>
#include <TStopwatch.h>

Double_t LiMa(Double_t s, Double_t b, Double_t alpha=0.2)
{
    const Double_t sum = s+b;

    if (s<0 || b<0 || alpha<=0)
        return -1;

    const Double_t l = s==0 ? 0 : s*TMath::Log(s/sum*(alpha+1)/alpha);
    const Double_t m = b==0 ? 0 : b*TMath::Log(b/sum*(alpha+1));

    return l+m<0 ? 0 : TMath::Sqrt((l+m)*2);
}

Double_t Error(Double_t s, Double_t b, Double_t alpha=0.2)
{
    const Double_t sN = s + alpha*alpha*b;

    const Double_t error = sN<0 ? 0 : TMath::Sqrt(sN);
    if (error==0)
        return 0;

    const Double_t Ns = s - alpha*b;

    return Ns<0 ? 0 : Ns/error;
}

double ganymed2(double xi0, double slope0)
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("crab-data-only.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double FileId, EvtNumber, X, Y, MeanX, MeanY, Width, Length, CosDelta,
           SinDelta, M3Long, SlopeLong, Leakage1, SlopeSpreadWeighted, Size,
           ConcCore, ConcCOG, NumIslands, NumUsedPixels;

    // Connect the variables to the corresponding leaves
    c.SetBranchAddress("FileId", &FileId);
    c.SetBranchAddress("EvtNumber", &EvtNumber);
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    //c.SetBranchAddress("SlopeSpreadWeighted", &SlopeSpreadWeighted);
    c.SetBranchAddress("Size", &Size);
    //c.SetBranchAddress("ConcCOG", &ConcCOG);
    //c.SetBranchAddress("ConcCore", &ConcCore);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;
    //double aberration = 1.02;

    // -------------------- Source dependent parameter calculation -------------------

    // Create a histogram for on- and off-data
    TH1F hon( "on",  "", 55, 0, 1);
    TH1F hoff("off", "", 55, 0, 1);

    long cnt_on  = 0;
    long cnt_off = 0;

    for (int i=0; i<c.GetEntries(); i++)
    {
        // read the i-th event from the file
        c.GetEntry(i);

        // First calculate all cuts to speedup the analysis
        double area = TMath::Pi()*Width*Length;

        bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1==0;
        if (!cutq)
            continue;

        bool cut0 = area < log10(Size)*898-1535;
        if (!cut0)
            continue;

        for (int angle=0; angle<360; angle+=60)
        {
            // -------------------- Source dependent parameter calculation -------------------

            double cr = cos(angle*TMath::DegToRad());
            double sr = sin(angle*TMath::DegToRad());

            double px = cr*X-sr*Y;
            double py = cr*Y+sr*X;   // blue / red

            double dx = MeanX - px*1.02;
            double dy = MeanY - py*1.02;

            double norm = sqrt(dx*dx + dy*dy);
            double dist = norm*mm2deg;

            double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
            double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

            double alpha = asin(lx);
            double sgn   = TMath::Sign(1., ly);

            // ------------------------------- Application ----------------------------------

            double m3l   = M3Long*sgn*mm2deg;
            double slope = SlopeLong*sgn/mm2deg;

            // --------------------------------- Analysis -----------------------------------

            double xi = xi0 + slope0*slope;// + 1.67972 *(1-1/(1+4.86232*Leakage1));

            double sign1 = m3l+0.07;
            double sign2 = (dist-0.5)*7.2-slope;

            double disp = (sign1<0 || sign2<0 ? -xi : xi)*(1-Width/Length);

            //double thetasq = (disp*disp + dist*dist - 2*disp*dist*cos(alpha))*mm2deg*mm2deg;
            double thetasq = disp*disp + dist*dist - 2*disp*dist*sqrt(1-lx*lx);

            // Count the on- and off-events
            if (thetasq<0.024)
                angle==0 ? cnt_on++ : cnt_off++;
        }
    }

    cout << cnt_on << "\t" << cnt_off << "\t" << cnt_on-cnt_off*0.2 << endl;

    return Error(cnt_on, cnt_off);
}

void ganymed_optimdisp()
{
    TH2F hist("", "", 21, 1.200-0.0050, 1.410-0.0050, 13, 0.010-0.0025, 0.140-0.0025);

    // Scan the grid of the two parameters Xi0 and Slope0
    for (float xi0=1.20; xi0<1.41; xi0+=0.01)
        for (float slope0=0.01; slope0<0.14; slope0+=0.01)
        {
            cout << xi0 << "\t" << slope0 << "\t";
            hist.Fill(xi0, slope0, ganymed2(xi0, slope0));
        }

    TCanvas *canv = new TCanvas;
    canv->SetTopMargin(0.01);

    hist.SetStats(kFALSE);
    hist.SetContour(100);
    hist.SetXTitle("Xi_{0} / deg");
    hist.SetYTitle("Slope_{0} #dot ns/deg^{2} ");
    hist.GetYaxis()->SetTitleOffset(1.2);
    hist.DrawCopy("colz cont2");
}
This gives the result shown in the attachments.
Energy Optimization
The following macro derives and checks a simple energy estimation from the simulated events:

#include <iostream>
#include <iomanip>

#include <TMath.h>
#include <TF1.h>
#include <TH1.h>
#include <TH2.h>
#include <TProfile.h>
#include <TChain.h>
#include <TGraph.h>
#include <TCanvas.h>
#include <TStopwatch.h>

void optimenergy()
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("simulation.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double X, Y, MeanX, MeanY, Width, Length, CosDelta, SinDelta, M3Long,
           SlopeLong, Leakage1, SlopeSpreadWeighted, Size, ConcCore, ConcCOG,
           NumIslands, NumUsedPixels, Zd, Energy;

    // Connect the variables to the corresponding leaves
    //c.SetBranchAddress("FileId", &FileId);
    //c.SetBranchAddress("EvtNumber", &EvtNumber);
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    c.SetBranchAddress("Size", &Size);
    c.SetBranchAddress("Zd", &Zd);
    c.SetBranchAddress("Energy", &Energy);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;
    //double aberration = 1.02;

    // -------------------- Source dependent parameter calculation -------------------

    TH2F h2svse ("H_SvsE",     "", 26*3,  2.2, 4.8, 3*30, 1.3, 4.3);
    TH2F h2est  ("H_EstVsMC",  "", 26*3,  2.2, 4.8, 26*3, 2.2, 4.8);
    TH2F h2bias ("H_BiasLog",  "", 26*3,  2.2, 4.8,  100,  -1,   1);
    TH2F h2lin  ("H_BiasLin",  "", 26*3,  2.2, 4.8,  100,  -1,   2);
    TH2F h2size ("H_ResSize",  "", 26*3,  2.2, 4.8,  100,  -1,   1);
    TH2F h2dist ("H_ResDist",  "",   50,    0, 2.5,  100,  -1,   1);
    TH2F h2slope("H_ResSlope", "",   50,  -10,  10,  100,  -1,   1);
    TH2F h2zd   ("H_ResZd",    "",   90,    0,  90,  100,  -1,   1);

    h2svse.SetStats(kFALSE);
    h2bias.SetStats(kFALSE);
    h2lin.SetStats(kFALSE);
    h2est.SetStats(kFALSE);
    h2size.SetStats(kFALSE);
    h2dist.SetStats(kFALSE);
    h2slope.SetStats(kFALSE);
    h2zd.SetStats(kFALSE);

    TStopwatch clock;

    // Loop over all events
    for (int i=0; i<c.GetEntries(); i++)
    {
        // read the i-th event from the file
        c.GetEntry(i);

        // First calculate all cuts to speedup the analysis
        double area = TMath::Pi()*Width*Length;

        // The aberration correction does increase also Width and Length by 1.02
        int angle = 0;

        // -------------------- Source dependent parameter calculation -------------------

        double cr = cos(angle*TMath::DegToRad());
        double sr = sin(angle*TMath::DegToRad());

        double px = cr*X-sr*Y;
        double py = cr*Y+sr*X;

        double dx = MeanX - px*1.022;
        double dy = MeanY - py*1.022;

        double norm = sqrt(dx*dx + dy*dy);
        double dist = norm*mm2deg;

        double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
        double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

        double alpha = asin(lx);
        double sgn   = TMath::Sign(1., ly);

        // ------------------------------- Application ----------------------------------

        double m3l   = M3Long*sgn*mm2deg;
        double slope = SlopeLong*sgn/mm2deg;

        // --------------------------------- Analysis -----------------------------------

        //double xi = 1.34723 + 0.15214 *slope + 0.970704*(1-1/(1+8.89826*Leakage1));
        double xi = 1.340 + 0.0755*slope + 1.67972*(1-1/(1+4.86232*Leakage1));

        double sign1 = m3l+0.07;
        double sign2 = (dist-0.5)*7.2-slope;

        double disp    = (sign1<0 || sign2<0 ? -xi : xi)*(1-Width/Length);
        double thetasq = disp*disp + dist*dist - 2*disp*dist*sqrt(1-lx*lx);

        // Keep only events reconstructed inside the signal region
        if (thetasq>0.024)
            continue;

        bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1<0.1;
        if (!cutq)
            continue;

        bool cut0 = area < log10(Size)*898-1535;
        if (!cut0)
            continue;

        h2svse.Fill(log10(Energy), log10(Size));

        // Simple energy estimator from Size, Zd, Dist and Slope
        double step = 0.8;
        double dd   = dist<step ? -(dist-step)*0.4 : (dist-step)*0.65;

        double energy = pow(10, 0.33 + log10(Size)/1.1 + Zd*0.02 + dd + slope*0.01);

        h2est.Fill( log10(Energy), log10(energy));
        h2bias.Fill(log10(energy), log10(energy)-log10(Energy));
        h2lin.Fill( log10(energy), (energy-Energy)/Energy);

        h2size.Fill (log10(Size), log10(energy)-log10(Energy));
        h2dist.Fill (dist,        log10(energy)-log10(Energy));
        h2slope.Fill(slope,       log10(energy)-log10(Energy));
        h2zd.Fill(   Zd,          log10(energy)-log10(Energy));
    }

    clock.Print();

    TF1 fx("f", "x", -100, 100);
    TF1 f0("f", "0", -100, 100);

    TCanvas *canv = new TCanvas("Canvas", "Energy estimation", 800, 900);
    canv->Divide(2,4);

    canv->cd(1);
    gPad->SetGridy();
    h2size.DrawCopy("colz");
    f0.DrawCopy("same");

    canv->cd(3);
    gPad->SetGridy();
    h2dist.DrawCopy("colz");
    f0.DrawCopy("same");

    canv->cd(5);
    gPad->SetGridy();
    h2slope.DrawCopy("colz");
    f0.DrawCopy("same");

    canv->cd(7);
    gPad->SetGridy();
    h2zd.DrawCopy("colz");
    f0.DrawCopy("same");

    canv->cd(2);
    gPad->SetGridy();
    h2svse.DrawCopy("colz");
    fx.DrawCopy("same");

    canv->cd(4);
    gPad->SetGridy();
    h2est.DrawCopy("colz");
    fx.DrawCopy("same");

    canv->cd(6);
    gPad->SetGridy();
    h2bias.DrawCopy("colz");
    f0.DrawCopy("same");

    canv->cd(8);

    // Resolution versus estimated energy (logarithmic and linear)
    TGraph grlin;
    TGraph grlog;
    for (int x=1; x<h2bias.GetNbinsX(); x++)
    {
        TH1D *p = h2bias.ProjectionY("_py", x, x);
        if (p->GetRMS()>0)
            grlog.SetPoint(grlog.GetN(), h2bias.GetXaxis()->GetBinCenter(x), pow(10, p->GetRMS())-1);
        delete p;

        p = h2lin.ProjectionY("_py", x, x);
        if (p->GetRMS()>0)
            grlin.SetPoint(grlin.GetN(), h2lin.GetXaxis()->GetBinCenter(x), p->GetRMS());
        delete p;
    }

    grlog.SetMarkerColor(kBlue);
    grlin.SetMinimum(0);
    grlin.DrawClone("A*");
    grlog.DrawClone("*");
}
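For reference, the energy estimator hard-coded in the macro above is

E_\mathrm{est} = 10^{\,0.33 + \log_{10}(\mathrm{Size})/1.1 + 0.02\,\mathrm{Zd} + dd + 0.01\,\mathrm{Slope}}
\qquad\text{with}\qquad
dd = \begin{cases} -0.40\,(\mathrm{Dist}-0.8) & \mathrm{Dist}<0.8 \\ \phantom{-}0.65\,(\mathrm{Dist}-0.8) & \mathrm{Dist}\ge 0.8 \end{cases}

where Dist is in degrees. The bias and resolution histograms produced by the macro are the tool to tune these constants.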
Day-by-day light-curve
The following query calculates, for every run (FileId) of the good-weather Crab data, the number of events surviving the cuts in the on- and off-regions and the resulting excess. From this a day-by-day (or run-by-run) light curve can be compiled:

WITH Table8 AS
(
  WITH Table7 AS
  (
    WITH Table6 AS
    (
      WITH Table5 AS
      (
        WITH Table4 AS
        (
          WITH Table3 AS
          (
            WITH Table2 AS
            (
              WITH Table1 AS
              (
                WITH Table0 AS
                (
                  SELECT -- 0
                    FileId, Weight, Size, NumUsedPixels, NumIslands, Leakage1,
                    MeanX, MeanY, CosDelta, SinDelta, M3Long, SlopeLong,
                    Width/Length AS WdivL,
                    PI()*Width*Length AS Area,
                    cosa*X - sina*Y AS PX,
                    cosa*Y + sina*X AS PY
                  FROM RunInfo
                  LEFT JOIN Images   USING (FileId)
                  LEFT JOIN Position USING (FileId, EvtNumber)
                  CROSS JOIN Wobble
                  WHERE fSourceKey=5 AND fRunTypeKey=1 AND fR750Cor>0.9e0*fR750Ref
                    AND NumUsedPixels>5.5 AND NumIslands<3.5 AND Leakage1<0.1
                ) -- AS Table0
                SELECT -- 1
                  FileId, Weight, CosDelta, SinDelta, M3Long, SlopeLong, Leakage1, WdivL,
                  MeanX - PX*1.02e0 AS DX,
                  MeanY - PY*1.02e0 AS DY
                FROM Table0
                WHERE Area < LOG10(Size)*898-1535
              ) -- AS Table1
              SELECT -- 2
                FileId, Weight, CosDelta, SinDelta, DX, DY, M3Long, SlopeLong, Leakage1, WdivL,
                SQRT(DX*DX + DY*DY) AS Norm
              FROM Table1
            ) -- AS Table2
            SELECT -- 3
              FileId, Weight, M3Long, SlopeLong, Leakage1, WdivL, Norm,
              LEAST(GREATEST((CosDelta*DY - SinDelta*DX)/Norm, -1), 1) AS LX,
              SIGN(CosDelta*DX + SinDelta*DY) AS Sign
            FROM Table2
          ) -- AS Table3
          SELECT -- 4
            FileId, Weight, Leakage1, WdivL, LX,
            Norm          *0.0117193246260285378e0 AS Dist,
            M3Long   *Sign*0.0117193246260285378e0 AS M3L,
            SlopeLong*Sign/0.0117193246260285378e0 AS Slope
          FROM Table3
        ) -- AS Table4
        SELECT -- 5
          FileId, Weight, WdivL, Dist, LX, M3L, Slope,
          -- 1.39252e0 + 0.154247e0*Slope + 1.67972e0*(1-1/(1+4.86232e0*Leakage1)) AS Xi
          1.34e0 + 0.0755e0*Slope + 1.67972e0*(1-1/(1+4.86232e0*Leakage1)) AS Xi
        FROM Table4
      ) -- AS Table5
      SELECT -- 6
        FileId, Weight, Dist, LX,
        IF (M3L<-0.07 || (Dist-0.5e0)*7.2e0-Slope<0, -Xi, Xi) * (1-WdivL) AS Disp
      FROM Table5
    ) -- AS Table6
    SELECT -- 7
      FileId, Weight,
      -- cos(alpha) = sqrt(1-lx^2)
      (Disp*Disp + Dist*Dist - 2*Disp*Dist*SQRT(1-LX*LX)) AS ThetaSq
    FROM Table6
  ) -- AS Table7
  SELECT -- 8
    FileId,
    COUNT(*) AS `Count`,
    COUNT(IF(Weight>0, 1, NULL))   AS `Signal`,
    COUNT(IF(Weight<0, 1, NULL))/5 AS `Background`
  FROM Table7
  WHERE ThetaSq<0.024336
  GROUP BY FileId
) -- AS Table8
SELECT -- 9
  FileId,
  20000000+FLOOR(FileId/1000) AS fNight,
  FileId%1000                 AS fRunID,
  `Count`                     AS fNumRuns,
  `Signal`                    AS fNumSigEvts,
  `Background`                AS fNumBgEvts,
  `Signal` - `Background`     AS fNumExcEvts
FROM Table8
Note that this query can easily run for 30 minutes or more!
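As a minimal sketch of what to do with the result (assuming the query output was written with rootifysql to a hypothetical file lightcurve.root, so that the tree Result contains the columns of the final SELECT as doubles), the nightly excess could be plotted like this:

#include <map>

#include <TChain.h>
#include <TGraph.h>
#include <TCanvas.h>

// Sketch only: sum the run-wise excess events returned by the query above
// per night and plot them. No normalization by observation time is applied.
void plot_lightcurve()
{
    TChain c("Result");
    c.AddFile("lightcurve.root");   // hypothetical output file of the query

    double fNight, fNumExcEvts;
    c.SetBranchAddress("fNight",      &fNight);
    c.SetBranchAddress("fNumExcEvts", &fNumExcEvts);

    // Sum the excess events per night
    std::map<double, double> excess;
    for (int i=0; i<c.GetEntries(); i++)
    {
        c.GetEntry(i);
        excess[fNight] += fNumExcEvts;
    }

    // One point per night
    TGraph g;
    for (const auto &night : excess)
        g.SetPoint(g.GetN(), night.first, night.second);

    new TCanvas;
    g.SetTitle(";Night [yyyymmdd];Excess events");
    g.DrawClone("A*");
}

For a real light curve you would still divide the nightly excess by the corresponding effective observation time.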
Theta-Square Plot
There are several ways to prepare a theta-square plot. If you want to change the cuts later on, you will have to download all the data and do it offline. However, this is time- and bandwidth-hungry. If you know your cuts already, an easy solution is to let MySQL create the histogram data for you and to use a local program only for plotting. This significantly decreases the bandwidth and storage requirements (maybe from several GB to just a few bytes) and also the CPU requirements, because the data does not have to be converted to an ASCII representation and back, nor compressed for the transfer, but is processed fully in memory.
Assume you want to produce a ROOT file which contains three branches: Bin (the number of the bin), Weight (-0.2 for background and 1 for signal) and Count (the number of entries in the bin). You can, for example, run the following query using rootifysql (see DatabaseBasedAnalysis/rootifysql). Note that you might want to adapt the theta-square cut you are going to use and the number of bins into which you sort the data up to the theta-square cut.
WITH Table7 AS
(
  WITH Table6 AS
  (
    WITH Table5 AS
    (
      WITH Table4 AS
      (
        WITH Table3 AS
        (
          WITH Table2 AS
          (
            WITH Table1 AS
            (
              WITH Table0 AS
              (
                SELECT -- 0
                  Weight, Size, NumUsedPixels, NumIslands, Leakage1,
                  MeanX, MeanY, CosDelta, SinDelta, M3Long, SlopeLong,
                  Width/Length AS WdivL,
                  PI()*Width*Length AS Area,
                  cosa*X - sina*Y AS PX,
                  cosa*Y + sina*X AS PY
                FROM RunInfo
                LEFT JOIN Images   USING (FileId)
                LEFT JOIN Position USING (FileId, EvtNumber)
                CROSS JOIN Wobble
                WHERE FileId BETWEEN 131101000 AND 131107000
                  AND fSourceKey=5 AND fRunTypeKey=1 AND fZenithDistanceMax<35
                  AND fR750Cor>0.9e0*fR750Ref
                  AND NumUsedPixels>5.5 AND Leakage1<0.1
              ) -- AS Table0
              SELECT -- 1
                Weight, CosDelta, SinDelta, M3Long, SlopeLong, Leakage1, WdivL,
                MeanX - PX*1.02e0 AS DX,
                MeanY - PY*1.02e0 AS DY
              FROM Table0
              WHERE Area < LOG10(Size)*898-1535
            ) -- AS Table1
            SELECT -- 2
              Weight, CosDelta, SinDelta, DX, DY, M3Long, SlopeLong, Leakage1, WdivL,
              SQRT(DX*DX + DY*DY) AS Norm
            FROM Table1
          ) -- AS Table2
          SELECT -- 3
            Weight, M3Long, SlopeLong, Leakage1, WdivL, Norm,
            LEAST(GREATEST((CosDelta*DY - SinDelta*DX)/Norm, -1), 1) AS LX,
            SIGN(CosDelta*DX + SinDelta*DY) AS Sign
          FROM Table2
        ) -- AS Table3
        SELECT -- 4
          Weight, Leakage1, WdivL, LX,
          Norm          *0.0117193246260285378e0 AS Dist,
          M3Long   *Sign*0.0117193246260285378e0 AS M3L,
          SlopeLong*Sign/0.0117193246260285378e0 AS Slope
        FROM Table3
      ) -- AS Table4
      SELECT -- 5
        Weight, WdivL, Dist, LX, M3L, Slope,
        1.299e0 + 0.0632e0*Slope + 1.67972e0*(1-1/(1+4.86232e0*Leakage1)) AS Xi
      FROM Table4
    ) -- AS Table5
    SELECT -- 6
      Weight, Dist, LX,
      IF (M3L<-0.07 || (Dist-0.5e0)*7.2e0-Slope<0, -Xi, Xi) * (1-WdivL) AS Disp
    FROM Table5
  ) -- AS Table6
  SELECT -- 7
    Weight,
    (Disp*Disp + Dist*Dist - 2*Disp*Dist*SQRT(1-LX*LX)) AS ThetaSq
  FROM Table6
) -- AS Table7
SELECT -- 8
  /* The number 0.024 corresponds to the theta-sq cut you want to apply. */
  /* The number 3 is the number of bins into which the values smaller    */
  /* than the theta-sq cut are divided. We are basically calculating the */
  /* bin number into which the event is sorted later. 0.024/3 is the     */
  /* bin width.                                                          */
  FLOOR(ThetaSq/(0.024/3)) AS Bin,
  Weight,
  COUNT(*) AS `Count`
FROM Table7
GROUP BY Bin, Weight
/* If not printed on the console, strictly speaking, this is not necessary */
ORDER BY Bin, Weight
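To make the binning explicit (these are the same numbers as in the comments of the query): with a theta-square cut of 0.024 deg² divided into 3 bins, the bin width is 0.024/3 = 0.008 deg², so

\mathrm{Bin} = \bigl\lfloor \vartheta^2 / 0.008 \bigr\rfloor

and, for example, an event with \vartheta^2 = 0.01\,\mathrm{deg}^2 ends up in Bin 1; all bins with Bin < 3 lie below the cut.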
In a second step, you might want to produce a theta-square plot with your preferred language (here: ROOT/C++). It is very important to ensure that your plotting routine is consistent with the histogram data produced by the SQL query above in terms of bin width. Here is an example ROOT macro to plot the data:
#include <iostream>

#include <TMath.h>
#include <TChain.h>
#include <TH1.h>
#include <TCanvas.h>
#include <TLatex.h>
#include <TLine.h>
#include <TPaveText.h>

// ============= Significance according to Li&Ma ===============
Double_t LiMa(Double_t s, Double_t b, Double_t alpha)
{
    const Double_t sum = s+b;

    if (s<0 || b<0 || alpha<=0)
        return -1;

    const Double_t l = s==0 ? 0 : s*TMath::Log(s/sum*(alpha+1)/alpha);
    const Double_t m = b==0 ? 0 : b*TMath::Log(b/sum*(alpha+1));

    return l+m<0 ? -1 : TMath::Sqrt((l+m)*2);
}

void plot_thetasq()
{
    // ====================== Setup the plot ===================

    double cut   = 0.024;    // Theta-square cut
    int    icut  = 3;        // Number of bins at which the theta-square cut should be displayed
    double max   = 0.98;     // maximum to be displayed on the x-axis (in degree^2)
    double width = cut/icut; // width of a single bin (here: a third of the theta-square cut)

    const char *title     = "FACT";
    const char *watermark = "preliminary";

    // ================= Initialize root chain =================

    TChain chain("Result");
    chain.AddFile("thetasq-hist.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double Bin, Weight, Count;

    // Connect the variables to the corresponding leaves
    chain.SetBranchAddress("Bin",    &Bin);
    chain.SetBranchAddress("Weight", &Weight);
    chain.SetBranchAddress("Count",  &Count);

    // ==================== Setting up Histogram ===============

    int num = floor(max/width);

    // Create a histogram for on- and off-data
    TH1F hon ("on",  "", num, 0, num*width);
    TH1F hoff("off", "", num, 0, num*width);

    hon.Sumw2();
    hoff.Sumw2();

    // Loop over all events
    for (int i=0; i<chain.GetEntries(); i++)
    {
        // read the i-th event from the file
        chain.GetEntry(i);

        if (Weight>0)
        {
            hon.SetBinContent((int)Bin+1, Count);
            hon.SetBinError(  (int)Bin+1, sqrt(Count));
        }
        else
        {
            hoff.SetBinContent((int)Bin+1, -Weight*Count);
            hoff.SetBinError(  (int)Bin+1, sqrt(-Weight*Count));
        }
    }

    // ========================= Result ========================

    double signal       = hon.Integral(1, icut);
    double background   = hoff.Integral(1, icut);
    double significance = LiMa(signal, background*5, 0.2);

    cout << "Signal: " << signal       << endl;
    cout << "Backgr: " << background   << endl;
    cout << "Signif: " << significance << endl;

    // ======================= Plotting ========================

    TCanvas *c = new TCanvas;
    c->SetBorderMode(0);
    c->SetFrameBorderMode(0);
    c->SetFillColor(kWhite);
    c->SetLeftMargin(0.12);
    c->SetRightMargin(0.01);
    c->SetBottomMargin(0.16);
    c->SetTopMargin(0.18);
    c->SetGridy();

    hon.SetStats(kFALSE);
    hon.SetMinimum(0);
    hon.SetLineColor(kBlack);
    hon.SetMarkerColor(kBlack);
    hon.SetLineWidth(2);

    hoff.GetXaxis()->SetLabelSize(0.06);
    hoff.GetXaxis()->SetTitleSize(0.06);
    hoff.GetXaxis()->SetTitleOffset(0.95);
    hoff.GetYaxis()->SetLabelSize(0.06);
    hoff.GetYaxis()->SetTitleSize(0.06);
    hoff.GetYaxis()->CenterTitle();
    hoff.SetXTitle("#vartheta^2");
    hoff.SetYTitle("Counts");
    hoff.SetTitleSize(0.07);
    hoff.SetTitle("");
    hoff.SetStats(kFALSE);
    hoff.SetLineWidth(2);
    hoff.SetLineColor(kBlack);
    hoff.SetMarkerColor(kBlack);
    hoff.SetFillColor(17);
    hoff.SetMinimum(0);
    hoff.SetMaximum(TMath::Max(hon.GetMaximum(), hoff.GetMaximum())*1.1);

    hoff.DrawCopy("bar");
    hon.DrawCopy("same");

    // draw a preliminary tag
    TLatex text;
    text.SetTextColor(kWhite);
    text.SetBit(TLatex::kTextNDC);
    text.SetTextSize(0.07);
    text.SetTextAngle(2.5);
    TString wm(watermark);
    text.DrawLatex(0.45, 0.2, wm);

    // draw line showing cut
    TLine line;
    line.SetLineColor(14);
    line.SetLineStyle(7);
    line.DrawLine(icut*width, 0, icut*width, hoff.GetMaximum());

    // Add a title above the plot
    TPaveText *pave=new TPaveText(0.12, 0.83, 0.99, 0.975, "blNDC");
    pave->SetBorderSize(1);
    pave->SetLabel(title);

    TText *ptxt = pave->AddText(" ");
    ptxt->SetTextAlign(20);

    ptxt = pave->AddText(Form("Significance %.1f#sigma, off-scale 0.2", significance));
    ptxt->SetTextAlign(21);

    ptxt = pave->AddText(Form("%.1f excess events, %.1f background events",
                              signal-background, background));
    ptxt->SetTextAlign(21);

    pave->SetBit(kCanDelete);
    pave->Draw("br");
}
This is how the result will look (ThetaSquare.png in the attachments):
Attachments (4)
- Result.png (52.2 KB)
- Result2.png (79.3 KB)
- Result4.png (63.3 KB)
- ThetaSquare.png (15.9 KB)