[[TOC]]

== Good weather Crab data ==

You want to create a file with good-weather Crab data from the 2017/18 season with a zenith distance smaller than 35°. The file should contain most of the image parameters with no cuts applied, so that you can optimize the cuts afterwards.

{{{#!Spoiler
{{{#!sql
SELECT
   Events.MeanX, Events.MeanY, Events.Width, Events.Length,
   Events.CosDelta, Events.SinDelta, Events.M3Long, Events.SlopeLong,
   Events.Leakage1, Events.NumIslands, Events.NumUsedPixels, Events.Size,
   Events.SlopeSpread, Events.SlopeSpreadWeighted,
   Events.TimeSpread, Events.TimeSpreadWeighted,
   Position.X, Position.Y
FROM Events
LEFT JOIN Position USING (FileId, EvtNumber)
LEFT JOIN RunInfo  USING (FileId)
WHERE fSourceKey=5
  AND fRunTypeKey=1
  AND FileId BETWEEN 170600000 AND 180500000
  AND fZenithDistanceMax<35
  AND fR750Cor>0.9*fR750Ref
}}}
}}}

== Optimize Area-cut ==

Assume you have written a large data set of good-weather, low zenith-distance Crab data to a file with the following query. The examples below show how you can make use of it in your analysis. To speed things up or to decrease the file size (715 MB), you might want to add further cuts, depending on your analysis. If you want to apply a theta-square cut, please refer to DatabaseBasedAnalysis#Dataretrieval.

{{{#!sql
SELECT
   MeanX, MeanY, Width, Length, CosDelta, SinDelta,
   M3Long, SlopeLong, Leakage1, NumIslands, NumUsedPixels, Size,
   X, Y
FROM Events
LEFT JOIN Position USING (FileId, EvtNumber)
LEFT JOIN RunInfo  USING (FileId)
WHERE fSourceKey=5
  AND fRunTypeKey=1
  AND FileId BETWEEN 170800000 AND 180800000
  AND fZenithDistanceMax<35
  AND fR750Cor>0.9*fR750Ref
}}}
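The macros below read the query result back as a ROOT tree; their comments assume the file was written with rootifysql, which stores every column as a double in a tree called ''Result''. Before running the optimization, you can check the file with a few lines like the following. This is only a minimal sketch; it assumes the file name crab2018.root used in the macro below and the tree name ''Result'':

{{{#!cpp
#include <iostream>

#include <TFile.h>
#include <TTree.h>

void inspect()
{
    // Open the rootified query output (file name as used in optimarea below)
    TFile file("crab2018.root");

    // Get the tree written by rootifysql
    TTree *tree = dynamic_cast<TTree*>(file.Get("Result"));
    if (!tree)
    {
        std::cout << "Tree 'Result' not found in crab2018.root!" << std::endl;
        return;
    }

    // Print the number of rows and the list of columns (branches)
    std::cout << "Entries: " << tree->GetEntries() << std::endl;
    tree->Print("toponly");
}
}}}

The actual optimization of the ''Area''-cut is then done with the following macro, which applies quality cuts, loops over the wobble positions and fills on- and off-histograms of the ellipse area versus ''Size'':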
{{{#!cpp
#include <iostream>
#include <algorithm>
#include <cmath>

#include <TMath.h>
#include <TChain.h>
#include <TH1.h>
#include <TH2.h>
#include <TGraph.h>
#include <TCanvas.h>

using namespace std;

Double_t LiMa(Double_t s, Double_t b, Double_t alpha=0.2)
{
    const Double_t sum = s+b;

    if (s<0 || b<0 || alpha<=0)
        return -1;

    const Double_t l = s==0 ? 0 : s*TMath::Log(s/sum*(alpha+1)/alpha);
    const Double_t m = b==0 ? 0 : b*TMath::Log(b/sum*(alpha+1));

    return l+m<0 ? 0 : TMath::Sqrt((l+m)*2);
}

void optimarea()
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("crab2018.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double X, Y, MeanX, MeanY, Width, Length, CosDelta, SinDelta,
        M3Long, SlopeLong, Leakage1, Size, ConcCore, ConcCOG,
        NumIslands, NumUsedPixels, Zd;

    // Connect the variables to the corresponding leaves
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    c.SetBranchAddress("Size", &Size);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;

    // Create a histogram for on- and off-data
    TH1F hold("old",  "", 100, 0, 1);
    TH1F hnew("onew", "", 100, 0, 1);

    TH2F h2area_on( "H_AvsS_on",  "", 24, 1.5, 4.5, 200, -2, 0);
    TH2F h2area_of( "H_AvsS_of",  "", 24, 1.5, 4.5, 200, -2, 0);
    TH2F h2area_sig("H_AvsS_sig", "", 24, 1.5, 4.5, 200, -2, 0);

    // Loop over all events
    for (int i=0; i<c.GetEntries(); i++)
    {
        // Read the i-th event
        c.GetEntry(i);

        // Quality cuts (cf. the cuts used in the queries on this page)
        bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1<0.1;
        if (!cutq)
            continue;

        // Area of the shower ellipse (as PI()*Width*Length in the SQL examples)
        double area = TMath::Pi()*Width*Length;

        // Loop over all wobble positions in the camera
        for (int angle=0; angle<360; angle+=60)
        {
            // -------------------- Source dependent parameter calculation -------------------
            double cr = cos(angle*TMath::DegToRad());
            double sr = sin(angle*TMath::DegToRad());

            double px = cr*X-sr*Y;
            double py = cr*Y+sr*X;

            double dx = MeanX - px*1.02;
            double dy = MeanY - py*1.02;

            double norm = sqrt(dx*dx + dy*dy);
            double dist = norm*mm2deg;

            double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
            double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

            double alpha = asin(lx);
            double sgn   = TMath::Sign(1., ly);

            // ------------------------------- Application ----------------------------------
            double m3l   = M3Long*sgn*mm2deg;
            double slope = SlopeLong*sgn/mm2deg;

            // --------------------------------- Analysis -----------------------------------
            double xi = 1.340 + 0.0755*slope + 1.67972*(1-1/(1+4.86232*Leakage1));

            double sign1 = m3l+0.07;
            double sign2 = (dist-0.5)*7.2-slope;

            double disp = (sign1<0 || sign2<0 ? -xi : xi)*(1-Width/Length);

            double thetasq = disp*disp + dist*dist - 2*disp*dist*sqrt(1-lx*lx);

            if (thetasq>0.024)
                continue;

            if (angle==0)
                h2area_on.Fill(log10(Size), log10(area*mm2deg*mm2deg));
            else
                h2area_of.Fill(log10(Size), log10(area*mm2deg*mm2deg));
        }
    }

    // Li&Ma significance for every possible area-cut, per bin in log10(Size)
    for (int x=1; x<=h2area_sig.GetNbinsX(); x++)
        for (int y=1; y<=h2area_sig.GetNbinsY(); y++)
        {
            double on = h2area_on.Integral(x, x, 1, y);
            double of = h2area_of.Integral(x, x, 1, y);

            h2area_sig.SetBinContent(x, y, LiMa(on, of));
        }

    // For each Size-bin, mark the area-cut with the highest significance
    TGraph gmax;
    for (int x=1; x<=h2area_sig.GetNbinsX(); x++)
    {
        TH1D *p = h2area_sig.ProjectionY("_py", x, x);

        double max = p->GetXaxis()->GetBinCenter(p->GetMaximumBin());
        delete p;

        gmax.SetPoint(gmax.GetN(), h2area_sig.GetXaxis()->GetBinCenter(x), max);
    }

    h2area_on.SetStats(kFALSE);
    h2area_of.SetStats(kFALSE);
    h2area_sig.SetStats(kFALSE);

    TCanvas *canv = new TCanvas;
    canv->Divide(2,2);

    canv->cd(1);
    h2area_on.DrawCopy("colz");
    gmax.DrawClone("*");

    canv->cd(2);
    h2area_sig.DrawCopy("colz");
    gmax.DrawClone("*");

    canv->cd(3);
    h2area_of.DrawCopy("colz");
    gmax.DrawClone("*");

    canv->cd(4);
}
}}}
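For each bin in log10(''Size''), the graph gmax marks the area-cut with the highest Li&Ma significance. One way to turn these per-bin optima into a smooth cut function is a simple linear fit of the graph. The helper below is only a hypothetical sketch; it assumes you hand it the gmax graph filled by the macro above:

{{{#!cpp
#include <iostream>

#include <TGraph.h>
#include <TF1.h>

// Hypothetical helper: fit the per-bin optima with a straight line in
// log10(Size) to obtain a smooth parameterization of the area-cut.
void fitcut(TGraph &gmax)
{
    // First-order polynomial: log10(area) = p0 + p1*log10(Size)
    gmax.Fit("pol1", "Q");

    TF1 *f = gmax.GetFunction("pol1");
    if (!f)
        return;

    std::cout << "log10(area [deg^2]) < "
              << f->GetParameter(0) << " + "
              << f->GetParameter(1) << " * log10(Size)" << std::endl;
}
}}}

Whether a straight line is adequate you have to judge from the plots. Also note that gmax is filled with log10 of the area in deg², while the cut used further below, area < log10(Size)*898-1535, is linear in the area in mm², so you still have to convert.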
Don't forget that this only gives you the best significance per bin in ''Size''. To get the best detection significance, you will have to introduce an additional ''Size''-cut.

== Optimize Disp ==

{{{#!sql
SELECT
   MeanX, MeanY, Width, Length, CosDelta, SinDelta,
   M3Long, SlopeLong, Leakage1, NumIslands, NumUsedPixels, Size,
   X, Y
FROM EventsMC
LEFT JOIN PositionMC USING (FileId, EvtNumber)
WHERE
   /* ... do some data selection here ... */
}}}

The following ROOT macro (to be compiled!) compares the old and the new ''Disp'' parameterization and plots the residuals versus several image parameters:

{{{#!cpp
#include <iostream>
#include <algorithm>
#include <cmath>

#include <TMath.h>
#include <TChain.h>
#include <TH1.h>
#include <TH2.h>
#include <TProfile.h>
#include <TCanvas.h>
#include <TVirtualPad.h>

using namespace std;

void optimdisp()
{
    // Create chain for the tree Result
    // This is just easier than using TFile/TTree
    TChain c("Result");

    // Add the input file to the chain
    c.AddFile("simulation.root");

    // Define variables for all leaves to be accessed
    // By definition rootifysql writes only doubles
    double X, Y, MeanX, MeanY, Width, Length, CosDelta, SinDelta,
        M3Long, SlopeLong, Leakage1, Size, ConcCore, ConcCOG,
        NumIslands, NumUsedPixels, Zd;

    // Connect the variables to the corresponding leaves
    c.SetBranchAddress("X", &X);
    c.SetBranchAddress("Y", &Y);
    c.SetBranchAddress("MeanX", &MeanX);
    c.SetBranchAddress("MeanY", &MeanY);
    c.SetBranchAddress("Width", &Width);
    c.SetBranchAddress("Length", &Length);
    c.SetBranchAddress("CosDelta", &CosDelta);
    c.SetBranchAddress("SinDelta", &SinDelta);
    c.SetBranchAddress("M3Long", &M3Long);
    c.SetBranchAddress("SlopeLong", &SlopeLong);
    c.SetBranchAddress("Leakage1", &Leakage1);
    c.SetBranchAddress("NumIslands", &NumIslands);
    c.SetBranchAddress("NumUsedPixels", &NumUsedPixels);
    c.SetBranchAddress("Size", &Size);
    c.SetBranchAddress("Zd", &Zd);

    // Set some constants (they could be included in the database
    // in the future)
    double mm2deg = +0.0117193246260285378;

    // Create histograms for the theta-square distributions (old and new
    // parameterization) and for the Disp residuals
    TH1F hold("old",  "", 100, 0, 1);
    TH1F hnew("onew", "", 100, 0, 1);

    TH2F     h2slope("H_VsSlope",   "", 75, -8,   7,     100, -2.5, 1.5);
    TProfile p2slope("P_VsSlope",   "", 75, -8,   7);
    TH2F     h2leak( "H_VsLeakage", "", 75,  0,   0.15,  100, -2.5, 1.5);
    TH2F     h2m3l(  "H_M3long",    "", 75, -0.2, 0.6,   100, -2.5, 1.5);
    TH2F     h2zd(   "H_Zd",        "", 30, 30,   60,    100, -2.5, 1.5);
    TH2F     h2ni(   "H_NumIsl",    "", 10,  0.5, 10.5,  100, -2.5, 1.5);
    TH2F     h2np(   "H_NumPix",    "", 10,  0.5, 100.5, 100, -2.5, 1.5);
    TH2F     h2size( "H_Size",      "", 30,  1.5, 4.5,   100, -2.5, 1.5);

    // Loop over all events
    for (int i=0; i<c.GetEntries(); i++)
    {
        // Read the i-th event
        c.GetEntry(i);

        // Quality cuts (cf. the cuts used in the queries on this page)
        bool cutq = NumIslands<3.5 && NumUsedPixels>5.5 && Leakage1<0.1;
        if (!cutq)
            continue;

        // Area of the shower ellipse (as PI()*Width*Length in the SQL examples)
        double area = TMath::Pi()*Width*Length;

        // Area cut
        bool cut0 = area < log10(Size)*898-1535;
        if (!cut0)
            continue;

        // -------------------- Source dependent parameter calculation -------------------
        int angle = 0;

        double cr = cos(angle*TMath::DegToRad());
        double sr = sin(angle*TMath::DegToRad());

        double px = cr*X-sr*Y;
        double py = cr*Y+sr*X;

        double dx = MeanX - px*1.02;
        double dy = MeanY - py*1.02;

        double norm = sqrt(dx*dx + dy*dy);
        double dist = norm*mm2deg;

        double lx = min(max((CosDelta*dy - SinDelta*dx)/norm, -1.), 1.);
        double ly = min(max((CosDelta*dx + SinDelta*dy)/norm, -1.), 1.);

        double alpha = asin(lx);
        double sgn   = TMath::Sign(1., ly);

        // ------------------------------- Application ----------------------------------
        double m3l   = M3Long*sgn*mm2deg;
        double slope = SlopeLong*sgn/mm2deg;

        // --------------------------------- Analysis -----------------------------------
        //double xi   = 1.34723 + 0.15214 *slope + 0.970704*(1-1/(1+8.89826*Leakage1));
        double xi_old = 1.39252 + 0.154247*slope + 1.67972 *(1-1/(1+4.86232*Leakage1));
        double xi_new = 1.340   + 0.0755  *slope + 1.67972 *(1-1/(1+4.86232*Leakage1));

        double sign1 = m3l+0.07;
        double sign2 = (dist-0.5)*7.2-slope;

        double disp_old = (sign1<0 || sign2<0 ? -xi_old : xi_old)*(1-Width/Length);
        double disp_new = (sign1<0 || sign2<0 ? -xi_new : xi_new)*(1-Width/Length);

        double thetasq_old = disp_old*disp_old + dist*dist - 2*disp_old*dist*sqrt(1-lx*lx);
        double thetasq_new = disp_new*disp_new + dist*dist - 2*disp_new*dist*sqrt(1-lx*lx);

        // Fill the theta-square histograms
        hold.Fill(thetasq_old);
        hnew.Fill(thetasq_new);

        double residual = xi_new-dist/(1-Width/Length);

        h2slope.Fill(slope,         residual);
        p2slope.Fill(slope,         residual);
        h2leak.Fill( Leakage1,      residual);
        h2m3l.Fill(  m3l,           residual);
        h2zd.Fill(   Zd,            residual);
        h2ni.Fill(   NumIslands,    residual);
        h2np.Fill(   NumUsedPixels, residual);
        h2size.Fill( log10(Size),   residual);
    }

    TCanvas *canv = new TCanvas;
    canv->Divide(2,2);

    canv->cd(1);
    gPad->SetGridy();
    h2slope.DrawCopy("colz");
    p2slope.DrawCopy("same");

    canv->cd(2);
    gPad->SetGridy();
    h2leak.DrawCopy("colz");

    canv->cd(3);
    gPad->SetGridy();
    //h2m3l.DrawCopy("colz");
    h2zd.DrawCopy("colz");
    //h2ni.DrawCopy("colz");
    //h2np.DrawCopy("colz");
    //h2size.DrawCopy("colz");

    canv->cd(4);

    // Plot the result
    hold.SetLineColor(kRed);
    hnew.SetMinimum(0);
    hnew.DrawCopy();
    hold.DrawCopy("same");
}
}}}
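If the new parameterization were perfect, the residual xi_new - dist/(1-Width/Length) would scatter around zero independent of ''Slope'', so the profile p2slope should come out flat. A linear fit of the profile quantifies any remaining trend and hints at how the ''Slope'' coefficient should be adjusted. The helper below is only a hypothetical sketch; it assumes you pass it the p2slope profile filled by the macro above:

{{{#!cpp
#include <iostream>

#include <TProfile.h>
#include <TF1.h>

// Hypothetical helper: fit the residual-vs-slope profile with a straight
// line. A non-zero slope of the fit indicates that the linear Slope
// coefficient of the xi parameterization still needs adjustment.
void fitresidual(TProfile &p2slope)
{
    p2slope.Fit("pol1", "Q");

    TF1 *f = p2slope.GetFunction("pol1");
    if (!f)
        return;

    std::cout << "offset: " << f->GetParameter(0)
              << "  trend: " << f->GetParameter(1) << " per unit Slope" << std::endl;
}
}}}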
== Day-by-day light-curve ==

The following query applies the quality cuts, the area-cut and the theta-square cut entirely in SQL and counts, per run, the number of signal and background events, i.e. it produces a run-wise light curve in excess events:

{{{#!sql
WITH Table8 AS
(
   WITH Table7 AS
   (
      WITH Table6 AS
      (
         WITH Table5 AS
         (
            WITH Table4 AS
            (
               WITH Table3 AS
               (
                  WITH Table2 AS
                  (
                     WITH Table1 AS
                     (
                        WITH Table0 AS
                        (
                           SELECT -- 0
                              FileId, Weight, Size, NumUsedPixels, NumIslands, Leakage1,
                              MeanX, MeanY, CosDelta, SinDelta, M3Long, SlopeLong,
                              Width/Length      AS WdivL,
                              PI()*Width*Length AS Area,
                              cosa*X - sina*Y   AS PX,
                              cosa*Y + sina*X   AS PY
                           FROM RunInfo
                           LEFT JOIN Events   USING (FileId)
                           LEFT JOIN Position USING (FileId, EvtNumber)
                           CROSS JOIN Wobble
                           WHERE fSourceKey=5
                             AND fRunTypeKey=1
                             AND fR750Cor>0.9e0*fR750Ref
                             AND NumUsedPixels>5.5
                             AND NumIslands<3.5
                             AND Leakage1<0.1
                        ) -- AS Table0
                        SELECT -- 1
                           FileId, Weight, CosDelta, SinDelta, M3Long, SlopeLong, Leakage1, WdivL,
                           MeanX - PX*1.02e0 AS DX,
                           MeanY - PY*1.02e0 AS DY
                        FROM Table0
                        WHERE Area < LOG10(Size)*898-1535
                     ) -- AS Table1
                     SELECT -- 2
                        FileId, Weight, CosDelta, SinDelta, DX, DY, M3Long, SlopeLong, Leakage1, WdivL,
                        SQRT(DX*DX + DY*DY) AS Norm
                     FROM Table1
                  ) -- AS Table2
                  SELECT -- 3
                     FileId, Weight, M3Long, SlopeLong, Leakage1, WdivL, Norm,
                     LEAST(GREATEST((CosDelta*DY - SinDelta*DX)/Norm, -1), 1) AS LX,
                     SIGN(CosDelta*DX + SinDelta*DY) AS Sign
                  FROM Table2
               ) -- AS Table3
               SELECT -- 4
                  FileId, Weight, Leakage1, WdivL, LX,
                  Norm     *0.0117193246260285378e0      AS Dist,
                  M3Long   *Sign*0.0117193246260285378e0 AS M3L,
                  SlopeLong*Sign/0.0117193246260285378e0 AS Slope
               FROM Table3
            ) -- AS Table4
            SELECT -- 5
               FileId, Weight, WdivL, Dist, LX, M3L, Slope,
               -- 1.39252e0 + 0.154247e0*Slope + 1.67972e0*(1-1/(1+4.86232e0*Leakage1)) AS Xi
               1.34e0 + 0.0755e0*Slope + 1.67972e0*(1-1/(1+4.86232e0*Leakage1)) AS Xi
            FROM Table4
         ) -- AS Table5
         SELECT -- 6
            FileId, Weight, Dist, LX,
            IF (M3L<-0.07 || (Dist-0.5e0)*7.2e0-Slope<0, -Xi, Xi) * (1-WdivL) AS Disp
         FROM Table5
      ) -- AS Table6
      SELECT -- 7
         FileId, Weight,
         -- cos(alpha) = sqrt(1-lx^2)
         (Disp*Disp + Dist*Dist - 2*Disp*Dist*SQRT(1-LX*LX)) AS ThetaSq
      FROM Table6
   ) -- AS Table7
   SELECT -- 8
      FileId,
      COUNT(*)                       AS `Count`,
      COUNT(IF(Weight>0, 1, NULL))   AS `Signal`,
      COUNT(IF(Weight<0, 1, NULL))/5 AS `Background`
   FROM Table7
   WHERE ThetaSq<0.024336
   GROUP BY FileId
) -- AS Table8
SELECT -- 9
   FileId,
   20000000+FLOOR(FileId/1000) AS fNight,
   FileId%1000                 AS fRunID,
   `Count`                     AS fNumRuns,
   `Signal`                    AS fNumSigEvts,
   `Background`                AS fNumBgEvts,
   `Signal` - `Background`     AS fNumExcEvts
FROM Table8
}}}
Note that this query can easily run for 30 minutes or more!
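If you write the result of this query to a ROOT file as well (e.g. with rootifysql), a quick run-wise plot of the excess events takes only a few lines. This is a minimal sketch; the file name lightcurve.root is a hypothetical choice, and the tree name ''Result'' with double-valued columns is assumed as for the other examples:

{{{#!cpp
#include <TFile.h>
#include <TTree.h>
#include <TGraph.h>
#include <TCanvas.h>

// Minimal sketch: plot the excess events per run versus run number.
void lightcurve()
{
    // Hypothetical file name for the rootified query result
    TFile file("lightcurve.root");

    TTree *tree = dynamic_cast<TTree*>(file.Get("Result"));
    if (!tree)
        return;

    // rootifysql stores all columns as doubles
    double FileId, fNumExcEvts;
    tree->SetBranchAddress("FileId",      &FileId);
    tree->SetBranchAddress("fNumExcEvts", &fNumExcEvts);

    TGraph *g = new TGraph;
    for (int i=0; i<tree->GetEntries(); i++)
    {
        tree->GetEntry(i);
        g->SetPoint(g->GetN(), FileId, fNumExcEvts);
    }

    TCanvas *canv = new TCanvas;
    g->SetTitle("Excess events per run;FileId;Excess events");
    g->Draw("A*");
}
}}}

Keep in mind that fNumExcEvts is a plain event count per run; for a real light curve you still have to normalize by the observation time of each run.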