#!/bin/bash # ---------------------------------------------------------------- # # README README README README README README README README README # # ---------------------------------------------------------------- # # # # To use this script, you need # # - a computer with access to the FACT database in La Palma # # - a file with the password of a valid mysql-user # # - to define the setup below for # # a) the DB access # # b) the data you want to have # # # # To define the setup, search for SETUP in this script and # # read the details there # # # # Per data request, you get up to 3 files: # # *_internal.dat # # *_collaborators.dat # # *_external.dat (only if binning is 20min or nightly) # # # # Please have in mind that this started as a tool for myself, then # # others started using it. Also the script is not yet finalized. # # In case you find problems and/or have a feature request, please # # send and email to dorner@astro.uni-wuerzburg.de # # # # ---------------------------------------------------------------- # # README README README README README README README README README # # ---------------------------------------------------------------- # # ToDo (notes DD): # ---------------- # - finalize policy and references # must-have # - update function for zd/th-correction # - update CU for QLA # - add CU for ISDC analysis # - add < 20121212 data for QLA # - check crab flux # nice-to-have # - add E2dNdE # - functionality to determine start time for seaon-binning # - offer several predefined datachecks? # - file as attachment to email ofr download.php # # content of files (wish list): # ----------------------------- # REMARK: keep order of columns to allow for reading with TGraph directly from file: X Y EX EY # # internal # -------- # time: time, delta time, start, stop, ontime # flux: excrate, excerr, corrate, corerr, CU CUerr, flux, fluxerr, # other info on flux: signif, cu-factor, num exc, num sig, num bg # other info: zd th R750cor R750ref # # external (allow only 20min and nightly binning) # -------- # time: time, delta time, start, stop # flux: excrate, excerr # # collaborators # ------------- # time: time, delta time, start, stop, ontime # flux: excrate, excerr, corrate, corerr, flux, flux-err, significance # # additional information to put: # ------------------------------ # timestamp of creation # query (for debugging / answering questions) # policy (adapted for internal/collaborators/external) [define in files to be used also by Send_Data*.sh # function print_policy() { echo "#" echo "# Data Usage Policy: " if [ "$expert" == "no" ] then echo "# Using data from the FACT Quick Look Analysis, you agree to cite the FACT design" echo "# paper (H. Anderhub et al. JINST 8 P6008) and the quick look analysis website " echo "# (https://fact-project.org/monitoring)." echo "#" echo "# References: " echo "# FACT design paper: http://adsabs.harvard.edu/abs/2013JInst...8P6008A" echo "# http://iopscience.iop.org/1748-0221/8/06/P06008 " echo "# FACT Performance Paper: http://adsabs.harvard.edu/abs/2014JInst...9P0012B" echo "# http://iopscience.iop.org/1748-0221/9/10/P10012" echo "# FACT quick look analysis: https://fact-project.org/monitoring" echo "# http://adsabs.harvard.edu/abs/2015arXiv150202582D" echo "# If you intend to use data or information from this website, please let us know for reference." else echo "# As a member or associated member of the FACT Collaboration, you have access to internal information." echo "# Any publication using FACT internal information has to have the full FACT author list." fi echo "#" } print_selection() { echo "#" echo "# Your Selection: " sourcename=`mysql --defaults-file=$sqlpw -u factread --host=$host $dbname -s -e "SELECT fSourceName FROM Source WHERE fSourceKey="$source` echo "# Source: "$sourcename echo "# Time range: "$nightmin"-"$nightmax echo "# Time format: "$timeunit if [ $bin -lt 0 ] then unit="night(s)" else if [ $bin -eq 0 ] then unit="periods" else unit="minutes" fi fi binning=`echo $bin | sed -e 's/-//'`" "$unit echo "# Binning: "$binning if [ "$expert" = "yes" ] then echo "# Additional Internal Selection: " echo "# "`echo $table | sed -e 's/AnalysisResultsRun//'`"-Analysis was used." if [ "$zdmax" != "" ] then echo "# Maximum Zenith Distance: "$zdmax" degree" fi if [ "$thmax" != "" ] then echo "# Maximum Trigger Threshold: "$thmax" DAC counts" fi if [ "$light" != "" ] then echo "# Light Condition Cut: "$lightcut fi if [ "$dust" != "" ] then echo "# Calima Cut: dust < "$dust" ug/cm3" fi if [ "$usedch" == "yes" ] then echo "# Data quality selection based on the cosmic ray rate was applied." fi fi echo "#" } function get_results() { # some query parts # some numbers for flux calculation crabflux="3.37e-11" fluxprec=13 crabflux="3.37" fluxprec=2 # some names and definitions needed several times below # ontime ontime1=" TIME_TO_SEC(TIMEDIFF(fRunStop,fRunStart))*fEffectiveOn " ontime2=" fOnTimeAfterCuts " ontimeif=" IF(ISNULL(fEffectiveOn), "$ontime2", "$ontime1") " # zd and threshold zenith="fZenithDistance" thresh="IF(ISNULL(fThresholdMinSet),fThresholdMedian,fThresholdMinSet)" # correction factor for excess rate (formula by TB) correvts=" fNumExcEvts*(pow(cos("$zenith"Mean*PI()/180),3)+14.8/21.9*pow(sin(2*"$zenith"Mean*PI()/180),5))/((1-0.00124/1.21*("$thresh"-500)*("$thresh">=500))) " # conversion to CU (determined by DD for QLA) # https://www.fact-project.org/logbook/showthread.php?tid=4927 cufactor="CUQLA(fNight)" # missing: ISDC analysis # some calculations excerr="ExcErr(Sum(fNumSigEvts), SUM(fNumBgEvts))" CU="SUM("$correvts"/"$cufactor")/SUM("$ontimeif")*3600" CUerr=$excerr"/SUM("$ontimeif")*3600*SUM("$correvts"/"$cufactor")/SUM(fNumExcEvts)" excerr2="ExcErr(SUM(o.sigevts),SUM(o.bgevts))" CU2="SUM(o.corevts/o.cufactor)/SUM(o.ot)*3600" CUerr2=$excerr2"/SUM(o.ot)*3600*SUM(o.corevts/o.cufactor)/(SUM(o.sigevts)-SUM(o.bgevts))" # columns to be selected # for night-binning ontime=" ROUND(SUM("$ontimeif")/60., 1) AS ontime" excrate=" ROUND(SUM(fNumExcEvts)/SUM("$ontimeif")*3600, 1) AS excrate" significance="ROUND(LiMa(Sum(fNumSigEvts), SUM(fNumBgEvts)), 1) AS significance" numexc="Sum(fNumExcEvts) AS numexc" numsig="Sum(fNumSigEvts) AS numsig" numbg="Sum(fNumBgEvts) AS numbg" excrateerr=" ROUND("$excerr"/SUM("$ontimeif")*3600, 1) AS excrateerr" correxcrate=" ROUND(SUM("$correvts")/SUM("$ontimeif")*3600, 1) AS correxcrate" correxcrateerr=" ROUND("$excerr"/SUM("$ontimeif")*3600*SUM("$correvts")/SUM(fNumExcEvts), 1) AS correxcrateerr" cu=" ROUND("$CU", 2) AS cu" cuerr=" ROUND("$CUerr", 2) AS cuerr" flux="ROUND("$CU" * "$crabflux", 2) AS flux" fluxerr="ROUND("$CUerr" * "$crabflux", 2) AS fluxerr" # for minute binning ontime2=" ROUND(SUM(o.ot)/60., 1) AS ontime" excrate2=" ROUND((SUM(o.sigevts)-SUM(o.bgevts))/SUM(o.ot)*3600, 1) AS excrate" significance2=" ROUND(LiMa(SUM(o.sigevts),SUM(o.bgevts)), 1) AS significance" numexc2="Sum(o.sigevts-o.bgevts) AS numexc" numsig2="Sum(o.sigevts) AS numsig" numbg2="Sum(o.bgevts) AS numbg" excrateerr2=" ROUND("$excerr2"/SUM(o.ot)*3600, 1) AS excrateerr" correxcrate2=" ROUND(SUM(o.corevts)/SUM(o.ot)*3600, 1) AS correxcrate" correxcrateerr2=" ROUND("$excerr2"/SUM(o.ot)*3600*SUM(o.corevts)/(SUM(o.sigevts)-SUM(o.bgevts)), 1) AS correxcrateerr" cu2=" ROUND("$CU2", 2) AS cu" cuerr2=" ROUND("$CUerr2", 2) AS cuerr" flux2="ROUND("$CU2" * "$crabflux", "$fluxprec") AS flux" fluxerr2="ROUND("$CUerr2" *"$crabflux", "$fluxprec") AS fluxerr" case $timeunit in mjd) delta="(Mjd(MAX(fRunStop))-Mjd(MIN(fRunStart)))/2" start=" Mjd(MIN(fRunStart)) AS start" stop=" Mjd(MAX(fRunStop)) AS stop" deltat=$delta" AS deltat" time=" Mjd(MIN(fRunStart))+"$delta" AS time" delta2="(Mjd(MAX(o.stop))-Mjd(MIN(o.start)))/2" start2=" Mjd(MIN(o.start)) AS start" stop2=" Mjd(MAX(o.stop)) AS stop" deltat2=$delta2" AS deltat" time2=" Mjd(MIN(o.start))+"$delta2" AS time" ;; unix) delta="(Unix_timestamp(CONVERT_TZ(MAX(fRunStop), '+00:00', 'SYSTEM')) - Unix_timestamp(CONVERT_TZ(MIN(fRunStart), '+00:00', 'SYSTEM')))/2" start="Unix_timestamp(CONVERT_TZ(MIN(fRunStart), '+00:00', 'SYSTEM')) AS start" stop="Unix_timestamp(CONVERT_TZ(MAX(fRunStop), '+00:00', 'SYSTEM')) AS stop" deltat=$delta" AS deltat" time=" Unix_timestamp(CONVERT_TZ(MIN(fRunStart), '+00:00', 'SYSTEM'))+"$delta" AS time" delta2="(Unix_timestamp(CONVERT_TZ(MAX(o.stop), '+00:00', 'SYSTEM')) - Unix_timestamp(CONVERT_TZ(MIN(o.start), '+00:00', 'SYSTEM')))/2" start2=" Unix_timestamp(CONVERT_TZ(MIN(o.start), '+00:00', 'SYSTEM')) AS start" stop2=" Unix_timestamp(CONVERT_TZ(MAX(o.stop), '+00:00', 'SYSTEM')) AS stop" deltat2=$delta2" AS deltat" time2=" Unix_timestamp(CONVERT_TZ(MIN(o.start), '+00:00', 'SYSTEM'))+"$delta2" AS time" ;; *) delta="sec_to_time(time_to_sec(timediff(MAX(fRunStop), MIN(fRunStart)))/2)" start=" MIN(fRunStart) AS start" stop=" MAX(fRunStop) AS stop" deltat=$delta" AS deltat" time=" addtime(MIN(fRunStart), "$delta") AS time" delta2="sec_to_time(time_to_sec(timediff(MAX(o.stop), MIN(o.start)))/2)" start2=" MIN(o.start) AS start" stop2=" MAX(o.stop) AS stop" deltat2=$delta2" AS deltat" time2=" addtime(MIN(o.start), "$delta2") AS time" ;; esac # from and join of query from=" FROM RunInfo LEFT JOIN "$table" USING (fNight, fRunID) " # data check based on artificial trigger rate # details see https://www.fact-project.org/logbook/showthread.php?tid=5790 #dch=" AND fR750Cor/fR750Ref >0.93 " dchstd=" AND fR750Cor/fR750Ref BETWEEN 0.93 AND 1.3 " # put together where-clause of query # time range and source where=" WHERE fSourceKey="$source" AND fNight BETWEEN "$nightmin" AND "$nightmax where=$where" AND NOT ISNULL(fNumExcEvts) " # some sanity checks where=$where" AND fRunTypeKey=1 " # zd cut if [ "$zdmax" != "" ] then where=$where" AND fZenithDistanceMax < "$zdmax fi # th cut if [ "$thmax" != "" ] then where=$where" AND "$thresh" < "$thmax fi # dust cut if [ "$dust" != "" ] then where=$where" AND fTNGDust<"$dust fi # light condition cut if [ "$light" == "nomoon" ] then lightcut=" fZenithDistanceMoon>90" fi if [ "$light" == "dark" ] then lightcut=" fMoonZenithDistance>90 AND fSunZenithDistance>108 " fi if [ "$light" != "" ] then where=$where" AND "$lightcut fi querybase=$from$where if [ "$usedch" == "yes" ] then if [ "$dch" == "" ] then querydch=$dchstd else echo "you are using for datacheck: "$dch querydch=$dch fi fi if [ $bin -le 0 ] then # first part of the query querystart="SELECT " querystart=$querystart" "$time", "$start", "$stop", " # final part of the query if [ $bin -eq 0 ] then orderby=" fPeriod " #querystart=$querystart" fPeriod AS num, " queryend=" GROUP BY fPeriod " else num=" FLOOR((Mjd(fRunStart)-Mjd("$nightmin")-0.5)/"`echo $bin | sed -e 's/-//'`".) " orderby=$num #querystart=$querystart" FLOOR((Mjd(fRunStart)-Mjd("$nightmin")-0.5)/"`echo $bin | sed -e 's/-//'`".) AS num, " queryend=" GROUP BY "$num fi #queryend=" GROUP BY num " if [ "$ontimelimit" = "" ] then queryend=$queryend" HAVING SUM("$ontimeif")>1200 ORDER BY "$orderby else queryend=$queryend" HAVING SUM("$ontimeif")>"$ontimelimit" ORDER BY "$orderby fi # internal queryint=$querystart queryint=$queryint" "$excrate", "$correxcrate", "$cu", "$flux", " queryint=$queryint" "$deltat", "$ontime", " queryint=$queryint" "$excrateerr", "$correxcrateerr", "$cuerr", "$fluxerr", " queryint=$queryint" "$significance", " queryint=$queryint" MIN(fNight) AS nightmin, MAX(fNight) AS nightmax, " queryint=$queryint" "$numexc", "$numsig", "$numbg", " queryint=$queryint" MIN("$zenith"Min) AS zdmin, MAX("$zenith"Max) AS zdmax, " queryint=$queryint" MIN("$thresh") AS thmin, MAX("$thresh") AS thmax, " queryint=$queryint" ROUND(AVG("$cufactor"), 1) AS cufactor, ROUND(AVG(fR750Cor), 2) AS R750cor, ROUND(AVG(fR750Ref), 2) AS R750ref " queryint=$queryint" "$querybase" "$querydch" "$queryend # for collaborators querycol=$querystart querycol=$querycol" "$excrate", "$correxcrate", "$cu", "$flux", " querycol=$querycol" "$deltat", "$ontime", " querycol=$querycol" "$excrateerr", "$correxcrateerr", "$cuerr", "$fluxerr", " querycol=$querycol" "$significance querycol=$querycol" "$querybase" "$querydch" "$queryend # external # no datacheck applied for external files queryext=$querystart" "$excrate", "$deltat", "$excrateerr" "$querybase" "$queryend else # first part of the query querystart="SELECT " querystart=$querystart" "$time2", "$start2", "$stop2", " # final part of the query querybase=" FROM (SELECT fNight, fZenithDistanceMin AS zdmin, fZenithDistanceMax AS zdmax, "$thresh" AS th, " querybase=$querybase" fR750Cor AS R750cor, fR750Ref AS R750ref, "$cufactor" AS cufactor, " querybase=$querybase" @ot:="$ontimeif" AS ot, fRunStart AS start, fRunStop AS stop, " querybase=$querybase" fNumSigEvts AS sigevts, fNumBgEvts AS bgevts, "$correvts" AS corevts, " querybase=$querybase" IF (@night=fNight AND FLOOR((@os+@ot)/"$bin"./60.)<1, @bl, @bl := @bl + 1) AS block, " querybase=$querybase" IF (@night=fNight AND FLOOR((@os+@ot)/"$bin"./60.)<1, @os:=@os + @ot, @os := @ot) AS os, @night :=fNight AS night " querybase=$querybase$from" CROSS JOIN (SELECT @night :=0, @ot :=0, @os :=0, @bl:=0) PARAMS " querybase=$querybase$where" ORDER BY fRunStart) o GROUP BY block HAVING ontime>0.75*"$bin" ORDER BY 'time'" # internal queryint=$querystart queryint=$queryint" "$excrate2", "$correxcrate2", "$cu2", "$flux2", " queryint=$queryint" "$deltat2", "$ontime2", " queryint=$queryint" "$excrateerr2", "$correxcrateerr2", "$cuerr2", "$fluxerr2", " queryint=$queryint" "$significance2", " queryint=$queryint" avg(o.night) AS night, " queryint=$queryint" "$numexc2", "$numsig2", "$numbg2", " queryint=$queryint" MIN(o.zdmin) AS zdmin, MAX(o.zdmax) AS zdmax, MIN(o.th) AS thmin, MAX(o.th) AS thmax, " queryint=$queryint" ROUND(AVG(o.cufactor), 1) AS cufactor, ROUND(AVG(o.R750cor), 2) AS R750cor, ROUND(AVG(o.R750ref), 2) AS R750ref " queryint=$queryint" "$querybase # for collaborators querycol=$querystart querycol=$querycol" "$excrate2", "$correxcrate2", "$cu2", "$flux2", " querycol=$querycol" "$deltat2", "$ontime2", " querycol=$querycol" "$excrateerr2", "$correxcrateerr2", "$cuerr2", "$fluxerr2", " querycol=$querycol" "$significance2 querycol=$querycol" "$querybase # external queryext=$querystart" "$excrate2", "$deltat2", "$ontime2", "$excrateerr2" "$querybase fi # write file for externals only for allowed binnings if [ $bin -eq 20 ] || [ $bin -eq -1 ] then fileext=$datapath"/FACT_preliminary_"$name"_external.dat" if [ "$overwrite" = "yes" ] then if [ "$mode" != "auto" ] then echo "creating "$fileext" ..." fi echo "# This file was created at "`date` > $fileext print_policy >> $fileext fi print_selection >> $fileext headerext="# time["$timeunit"] start["$timeunit"] stop["$timeunit"] excess-rate[evts/h] (stop-start)/2["$timeunit"] excess-rate_error[evts/h] " echo $headerext >> $fileext #echo "$queryext" mysql --defaults-file=$sqlpw -u factread --host=$host $dbname -s -e "$queryext" >> $fileext #mysql --defaults-file=$sqlpw -u factread --host=$host $dbname -e "$queryext" fi if [ "$mode" == "auto" ] && [ "$expert" == "no" ] then return fi fileint=$datapath"/FACT_preliminary_"$name"_internal.dat" if [ "$overwrite" = "yes" ] then if [ "$mode" != "auto" ] then echo "creating "$fileint" ..." fi echo "# This file was created at "`date` > $fileint print_policy >> $fileint print_selection >> $fileint echo "# The following query was used: " >> $fileint echo "# "$queryint >> $fileint echo "#" >> $fileint fi headerint="# time["$timeunit"] start["$timeunit"] stop["$timeunit"] excess-rate[evts/h] corrected_excess-rate[evts/h] flux[CU] flux[e-11/cm2/s] (stop-start)/2["$timeunit"] ontime[min]" headerint=$headerint" excess-rate_error[evts/h] corrected_excess-rate_error[evts/h] flux_error[CU] flux_error[e-11/cm2/s] significance night num_exc num_sig num_bg " headerint=$headerint" zdmin zdmax thmin thmax avg(cufactor) avg(R750cor) avg(R750ref) " echo $headerint >> $fileint #echo "$queryint" mysql --defaults-file=$sqlpw -u factread --host=$host $dbname -s -e "$queryint" >> $fileint #mysql --defaults-file=$sqlpw -u factread --host=$host $dbname -e "$queryint" if [ "$mode" == "auto" ] then return fi filecol=$datapath"/FACT_preliminary_"$name"_collaborators.dat" if [ "$overwrite" = "yes" ] then echo "creating "$filecol" ..." echo "# This file was created at "`date` > $filecol print_policy >> $filecol print_selection >> $filecol echo "# The following query was used: " >> $filecol echo "# "$querycol >> $filecol echo "#" >> $filecol fi headercol="# time["$timeunit"] start["$timeunit"] stop["$timeunit"] excess-rate[evts/h] corrected_excess-rate[evts/h] flux[CU] flux[e-11/cm2/s] (stop-start)/2["$timeunit"] ontime[min]" headercol=$headercol" excess-rate_error[evts/h] corrected_excess-rate_error[evts/h] flux_error[CU] flux_error[e-11/cm2/s] significance " echo $headercol >> $filecol #echo "$querycol" mysql --defaults-file=$sqlpw -u factread --host=$host $dbname -s -e "$querycol" >> $filecol #mysql --defaults-file=$sqlpw -u factread --host=$host $dbname -e "$querycol } # evaluation of command line options (for usage with download.php) if [ ${#@} -eq 13 ] then #get_data.sh $start $stop $source $timebin $email $table $time $expert $dch $zd $th $light $dust mode="auto" overwrite="yes" # setup datapath="/home/factwww/dch/data" #datapath="./data" sqlpw=/home/fact/.mysql.pw #sqlpw=/home/fact/.mysql.pw2 host=10.0.100.21 dbname=factdata nightmin=$1 nightmax=$2 source=$3 bin=$4 if [ "$bin" == "00" ] then bin=0 fi email=$5 table=$6 timeunit=$7 expert=$8 usedch=$9 # novalue gives same result as no if [ "${10}" != "novalue" ] && [ "${10}" != "all" ] then zdmax=${10} fi if [ "${11}" != "novalue" ] && [ "${11}" != "all" ] then thmax=${11} fi if [ "${12}" != "novalue" ] && [ "${12}" != "all" ] then light=${12} fi if [ "${13}" != "novalue" ] && [ "${13}" != "all" ] then dust=${13} fi name=`echo $email | sed -e 's/@/-at-/'` get_results # sending email if [ "$expert" == "yes" ] then cat $fileint | mail -s 'FACT internal data download' -b qla@fact-project.org -r qla@fact-project.org $email else cat $fileext | mail -s 'FACT data download' -b qla@fact-project.org -r qla@fact-project.org $email fi exit fi # -------------------------------------------------------------------------------------- # # SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP # # -------------------------------------------------------------------------------------- # # # # The lines below define the basic setup for the database and give examples and # # explanations for the various options available. # # The request of the data itself is done with a smaller setup further down. # # # # -------------------------------------------------------------------------------------- # # # ---------- # DB SETUP # ---------- # path to file with mysql password sqlpw=/home/$USER/.mysql.pw # host of mysql server with FACT DB #host=lp-fact # ISDC host=10.0.100.21 # LP or LP via vpn #host=localhost # your local machine in case you have a copy of DB # name of database dbname=factdata # # ------------- # BASIC SETUP # ------------- # output path path=`dirname $0` datapath=$path"/data" # create directory for data files if ! [ -e $datapath ] then mkdir $datapath fi # time unit #timeunit=timestamp # default #timeunit=unix timeunit=mjd # time binning # positive values: minutes # negative values: days # special case 0: period # for season binning choose -365 and according start date #bin=20 # minutes #bin=0 # period bin=-1 # nightly #bin=-365 # yearly # choose analysis #table="AnalysisResultsAllQLA" # N/A table="AnalysisResultsRunLP" # QLA #table="AnalysisResultsRunISDC" # ISDC # time range nightmin=20111115 nightmax=20201231 # overwrite dataset file? # (useful to combine different binnings in one file -> set to "no") overwrite="yes" # optional: require minimal ontime per bin (default 20 min) #ontimelimit=30 # 30 min ontimelimit= # default 20 min # data quality selection # if you explicitely don't want a datacheck, you can comment the following line usedch="yes" # use your own datacheck instead # use a line like the following defining your own data quality selection cut #dch=" AND fR750Cor/fR750Ref BETWEEN 0.93 AND 1.3 " # apply additional predefined cuts # light conditions #light="nomoon" # only data with no moon (but twilight allowed) #light="dark" # only dark night data # TNG dust - cut away data with calima #dust=1 #dust=10 # -------------------------------------------------------------------------------------- # # SETUP - GET YOUR DATA HERE - SETUP - GET YOUR DATA HERE - SETUP - GET YOUR DATA HERE # # -------------------------------------------------------------------------------------- # # # # Adapt the lines below to your needs. # # Overwrite default settings above. # # The data-request is sent with the line 'get_results.' # # Minumum setup: Define source key and name for file. # # The list of source keys can be found at # # https://fact-project.org/run_db/db/printtable.php?fTable=Source&fSortBy=fSourceKEY+ # # More examples can be found further down. # # # # REMARKS: # # - correction of effect of zd and threshold not yet finalized and only valid for QLA # # - no CU-conversion available for ISDC-analysis so far (that for QLA is used instead) # # # # -------------------------------------------------------------------------------------- # # 501 MAGIC source=2 name="Mrk501_2014_forMAGIC" bin=-1 nightmin=20140714 nightmax=20140805 get_results bin=30 name="Mrk501_2014_forMAGIC30" get_results bin=0 name="P" nightmin=20140501 nightmax=20140930 get_results bin=20 nightmin=20140623 nightmax=20140623 name="Mrk501_test" get_results # end script here exit # # more examples # # Mrk 421 source=1 name="Mrk421_nightly" bin=-1 get_results name="Mrk421_20min" bin=20 get_results name="Mrk421_3d" bin=-3 get_results name="Mrk421_10d" bin=-10 get_results name="Mrk421_period" bin=0 get_results # Mrk 501 source=2 name="Mrk501_nightly" bin=-1 get_results name="Mrk501_20min" bin=20 get_results name="Mrk501_3d" bin=-3 get_results name="Mrk501_10d" bin=-10 get_results name="Mrk501_period" bin=0 get_results # 2344 source=3 name="2344_nightly" bin=-1 get_results name="2344_20min" bin=20 get_results name="2344_period" bin=0 get_results # 1959 source=7 name="1959_nightly" bin=-1 get_results name="1959_20min" bin=20 get_results name="1959_period" bin=0 get_results # 0323 source=12 name="0323_nightly" bin=-1 get_results name="0323_20min" bin=20 get_results name="0323_period" bin=0 get_results # crab source=5 name="Crab_nightly" bin=-1 get_results name="Crab_20min" bin=20 get_results name="Crab_period" bin=0 get_results name="Crab_season" bin=-365 nightmin=20110716 nightmax=20180716 get_results name="1959_2016" source=7 bin=-1 nightmin=20160201 nightmax=20161105 get_results name="1959_all_variable" overwrite="no" source=7 bin=-365 nightmin=20120201 nightmax=20130131 get_results nightmin=20130201 nightmax=20140131 get_results nightmin=20140201 nightmax=20150131 get_results bin=0 nightmin=20150201 nightmax=20160131 get_results bin=-1 nightmin=20160201 nightmax=20170131 get_results bin=0 nightmin=20170201 nightmax=20180131 get_results overwrite="yes" name="1959_all_variable2" overwrite="no" source=7 bin=-365 nightmin=20120201 nightmax=20130131 get_results nightmin=20130201 nightmax=20140131 get_results nightmin=20140201 nightmax=20150131 get_results bin=0 nightmin=20150201 nightmax=20160131 get_results bin=-1 nightmin=20160201 nightmax=20160817 get_results bin=0 nightmin=20160818 nightmax=20180131 get_results overwrite="yes" bin=0 source=3 name="2344period" get_results # flare night (HESS) name="Mrk501_10min_flarenight" source=2 bin=10 nightmin=20140623 nightmax=20140623 get_results # flare night (HESS) name="Mrk501_5min_flarenight" source=2 bin=5 nightmin=20140623 nightmax=20140623 get_results # full sample name="Mrk421_all_nightly" source=1 get_results name="Mrk501_all_nightly" source=2 get_results name="1959_all_nightly" source=7 get_results name="2344_all_nightly" source=3 get_results name="HESE20160427" source=19 nightmin=20160425 bin=-10 get_results name="AMON20160731" source=21 nightmin=20160730 bin=-10 get_results