#!/bin/bash
#
# missing:
# finalize DB-entries for being called
# prepare directly template for email
# add x-ray trigger as trigger type or
# add note for values > 70 evts/h for X-ray triggers
#
# add more information:
# mjd, obs-summary, weather info (clouds? dust?), CU, corrected excrates
#
# evaluate output of all nights (check also for crab to estimate fluctuations)
# error emails in case no DB content / no QLA
# calculate delay of QLA and send email if > 30 Min
# run for more nights:
# for (( i=0; i < 70 ; i++)); do date=`date --date="-${i}days" +%Y%m%d`; /home/fact/SW.automatic.processing/DataCheck/QuickLook/FlareAlerts.sh $date; done > logfile

source `dirname $0`/../Sourcefile.sh
printprocesslog "INFO starting $0"

emailfrom=dorner@astro.uni-wuerzburg.de
emailto=fact-online@lists.phys.ethz.ch

if [ ! -e $flarealertspath ] || [ "$flarealertspath" == "" ]
then
   echo "flarealertspath "$flarealertspath" missing on "$HOSTNAME
   printprocesslog "ERROR flarealertspath "$flarealertspath" missing on "$HOSTNAME
   finish
fi

voeventpath="/home/fact/amon/flare_alerts/"

logfile=$runlogpath"/FlareAlerts-"$datetime".log"
date > $logfile

# get date
if [ "$1" != "" ]
then
   checkstring=`echo $1 | grep -E -o '^20[0-9][0-9][01][0-9][0-3][0-9]$'`
   echo $checkstring
   if [ "$checkstring" = "" ]
   then
      night=`date +%Y%m%d --date="-12 HOUR"`
   else
      night=$1
   fi
else
   night=`date +%Y%m%d --date="-12 HOUR"`
fi
echo "Processing "$night >> $logfile

# get sources for current night from DB (RunInfo)
query="SELECT fSourceKey FROM RunInfo WHERE fNight="$night" AND fRunTypeKey=1 AND NOT ISNULL(fSourceKey) GROUP BY fSourceKey"
sourcekeys=( `sendquery` )
if [ ${#sourcekeys[@]} -eq 0 ]
then
   echo " No sources found for "$night >> $logfile
   finish
fi
printprocesslog "INFO Checking the "${#sourcekeys[@]}" sourcekeys: "${sourcekeys[@]}

# some stuff for queries:
ontime="IF(ISNULL(fEffectiveOn), fOnTimeAfterCuts, TIME_TO_SEC(TIMEDIFF(fRunStop,fRunStart))*fEffectiveOn)"
threshold="IF (ISNULL(fThresholdMinSet), fThresholdMedian, fThresholdMinSet)"
#cu="20.0"
cu="CUQLA(fNight)"
corr="1"
# missing: corrected excessrates
# missing: excerr is NOT rate
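# The list in the header asks for a QLA-delay check and for error emails if the QLA is
# missing. The following function is only a minimal sketch of such a check and is not
# called anywhere yet: it assumes that the delay can be measured as the minutes since
# the end of the newest run of the night that already has QLA results, and that 30 min
# is the wanted threshold. Query, threshold, the handling of empty sendquery output and
# the email wording have to be adapted before it is used.
function check_qla_delay()
{
   query="SELECT TIMESTAMPDIFF(MINUTE, MAX(fRunStop), UTC_TIMESTAMP()) "
   query=$query"FROM AnalysisResultsRunLP LEFT JOIN RunInfo USING (fNight, fRunID) "
   query=$query"WHERE fNight="$night" AND NOT ISNULL(fNumExcEvts) "
   qladelay=`sendquery`
   if [ "$qladelay" = "" ] || [ "$qladelay" = "NULL" ]
   then
      # no QLA results at all for this night
      echo "No QLA results in the DB for night "$night | mail -s 'QLA results missing' -r $emailfrom $emailto
      return
   fi
   if [ $qladelay -gt 30 ]
   then
      echo "QLA results for night "$night" are delayed by "$qladelay" min." | mail -s 'QLA delayed' -r $emailfrom $emailto
   fi
}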
query=$query"ROUND(SUM(fNumExcEvts*"$corr")/SUM("$ontime")*3600/"$cu",1) as corcu, " # make value time dependent # put here correction factor query=$query"ROUND(ExcErr(SUM(fNumSigEvts), SUM(fNumBgEvts))/SUM("$ontime")*3600*SUM(fNumExcEvts)/SUM(fNumExcEvts*"$corr")/"$cu", 1) AS corcuerr, " # correctionfactor = exc / exc_cor # make value time dependent # put here correction factor query=$query"MIN(fZenithDistanceMin) as zdmin, MAX(fZenithDistanceMax) as zdmax, " query=$query"MIN("$threshold") as thmin, MAX("$threshold") as thmax " query=$query"FROM AnalysisResultsRunLP " query=$query"LEFT JOIN RunInfo USING (fNight, fRunID) " query=$query"WHERE fSourceKey="$sourcekey" AND fNight="$night" AND NOT ISNULL(fNumExcEvts) " query=$query"GROUP BY fNight, fSourceKey " # query=$query"ORDER BY fRunStart " query=$query"HAVING ontime > 0.5 " # at least 30 minutes of observation } function get_query_minute_binning() { # set binning if [ "$1" != "" ] then bin2=$1 else bin2=$bin fi # query to get information from DB query="SELECT MAX(o.b) AS num, " query=$query"MIN(o.n) AS night, MIN(o.run) AS runmin, MAX(o.run) AS runmax, " query=$query"MIN(o.start) AS start, MAX(o.stop) AS stop, " query=$query"ROUND(SUM(o.ot)/60.,1) AS ontime, " query=$query"SUM(o.sig) AS sig, SUM(o.bg) AS bg, " query=$query"ROUND(SUM(o.bg)/SUM(o.ot)*3600,1) AS bgrate, " query=$query"SUM(o.exc) AS exc, " query=$query"ROUND(ExcErr(SUM(o.sig), SUM(o.bg)), 1) AS excerr, " query=$query"ROUND(SUM(o.exc)/SUM(o.ot)*3600,1) AS excrate, " query=$query"ROUND(ExcErr(SUM(o.sig), SUM(o.bg))/SUM(o.ot)*3600, 1) AS excrateerr, " query=$query"ROUND(SUM(o.exccor)/SUM(o.ot)*3600,1) as corexcrate, " query=$query"ROUND(ExcErr(SUM(o.sig), SUM(o.bg))/SUM(o.ot)*3600*SUM(o.exc)/SUM(o.exccor), 1) AS corexcrateerr, " # correctionfactor = exc / exc_cor query=$query"ROUND(LiMa(SUM(o.sig), SUM(o.bg)),1) AS signif, " query=$query"ROUND(SUM(o.exc)/SUM(o.ot)*3600/o.cu,1) AS cu, " query=$query"ROUND(ExcErr(SUM(o.sig), SUM(o.bg))/SUM(o.ot)*3600/o.cu, 1) AS cuerr, " query=$query"ROUND(SUM(o.exccor)/SUM(o.ot)*3600/o.cu,1) as corcu, " query=$query"ROUND(ExcErr(SUM(o.sig), SUM(o.bg))/SUM(o.ot)*3600*SUM(o.exc)/SUM(o.exccor)/o.cu, 1) AS corcuerr, " # correctionfactor = exc / exc_cor query=$query"MIN(o.zdmin) as zdmin, MAX(o.zdmax) as zdmax, " query=$query"MIN(o.th) as thmin, MAX(o.th) as thmax " query=$query"FROM (" query=$query"SELECT " query=$query"fRunID AS run, fNight AS n, " query=$query"@ot:= "$ontime" AS ot, " query=$query"fRunStart AS start, fRunStop AS stop, " query=$query"fNumExcEvts AS exc, fNumBgEvts AS bg, fNumSigEvts AS sig, " query=$query"fNumExcEvts*"$corr" AS exccor, " # put here correction factor query=$query$cu" as cu, " # make value time dependent query=$query"fZenithDistanceMin AS zdmin, fZenithDistanceMax AS zdmax, " query=$query$threshold" AS th, " query=$query"IF (@night=fNight AND FLOOR((@os+@ot)/"$bin2"./60.)<1, @bl, @bl := @bl + 1) AS b, " query=$query"IF (@night=fNight AND FLOOR((@os+@ot)/"$bin2"./60.)<1, @os:=@os + @ot, @os := @ot) AS os, @" query=$query"night :=fNight AS night FROM AnalysisResultsRunLP " query=$query"LEFT JOIN RunInfo USING (fNight, fRunID) " query=$query"CROSS JOIN (SELECT @night :=0, @ot :=0, @os :=0, @bl:=0) PARAMS " query=$query"WHERE fSourceKey="$sourcekey" AND fNight="$night" AND NOT ISNULL(fNumExcEvts) " # if [ "$1" != "" ] # then # query=$query" AND fRunID <="${results[$num+3]} # fi query=$query" ORDER BY fRunStart " # if [ "$1" != "" ] # then # query=$query"DESC" # fi query=$query" ) o GROUP BY b HAVING ontime > 
"$bin2"*0.75 ORDER BY start " # if [ "$1" != "" ] # then # query=$query"DESC" # fi #echo $query } function print_voevent_file() { # put here voevent file # make sure that it is written to amon-folder echo '' echo '' echo ' ' echo ' ivo://FACT' echo ' '`date +%F\ %H:%M:%S`'' echo ' ' # FACT = 5 in AMON echo ' ' echo ' ' echo ' Stream number' echo ' ' # event identifier # nightly binning: night+sourcekey # 20 min binning: night+source+? echo ' ' echo ' Id number' echo ' ' # keep 0 for the beginning # how to handle updates? e.g. in case of full disk and random processing echo ' ' echo ' Revision number' echo ' ' # what to do with this? in IC it's number of neutrinos, but does number of gammas make sense? # (excevts? depends on time range...) echo ' ' echo ' Number of events' echo ' ' # get time window (stop of last - start of first run) # maybe: don't send alert if time window > xxx echo ' ' echo ' Time window of the burst' echo ' ' # probably 0 echo ' ' echo ' Uncertainty of the time window' echo ' ' # FPR - to be calculated echo ' ' echo ' False positive rate' echo ' ' # some significane value - how significant is the alert? # to be discussed and calculated echo ' ' echo ' Pvalue' echo ' ' # use source RA/Dec? but then naming doesn't fit # -> leave empty for the moment echo ' ' echo ' Pointing RA' echo ' ' echo ' ' echo ' Pointing Dec' echo ' ' # shape of psf - to be determined - fit thetaplot? for crab or mc echo ' ' echo ' Type of psf (skymap, fisher, kent, king)' echo ' ' echo ' ' # which other information to add? echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' longitude' echo ' latitude' echo ' elevation' echo ' ' echo ' -17.88' echo ' 28.76' echo ' 2200' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' RA' echo ' Dec' echo ' ' echo ' '${sourceinfo[2]}'' # RA in deg echo ' '${sourceinfo[0]}'' # decl in deg echo ' ' echo ' 0.1' # PSF of FACT echo ' ' echo ' ' echo ' ' echo ' ' echo ' ' echo ' FACT flare event information' echo '' } function evaluate_result() { oldexc=0 exc=0 excold=0 slope=0 slopeprev=0 i=0 # be careful with start and stop (space inbetween) -> 27 columns instead of 25 while [ 0 -lt 1 ] do trigger=0 num=`echo "$i * 27" | bc -l` if [ "${results[$num]}" = "" ] then break fi #night=${results[$num+1]} runid=${results[$num+2]} sig=${results[$num+18]} # significance #exc=${results[$num+14]} # excrate excold=$exc exc=${results[$num+19]} # excrate in CU if [ "$onlyifhigher" = "yes" ] then higher=` echo " $exc > $oldexc " | bc ` if [ $higher -eq 1 ] then # keep old value oldexc=$exc fi fi if [ "$bin" = "" ] then echo " ontime: "${results[$num+8]}" h" >> $logfile fi # fast rise/decay trigger if [ $triggertype -eq 3 ] then slopeprev=$slope sigprev=$sig if [ "$excold" = "0" ] then slope=0 else slope=`echo " scale=1; ( $exc - $excold ) / ( $bin / 60. 
) " | bc -l ` fi get_query_minute_binning 60 #echo $query results2=( `sendquery` ) # need to check last hour backward if [ "${results2[19]}" = "" ] then slope60=0 sig60=0 else sig60=${results2[18]} # significance 1h if [ "${results2[27+19]}" = "" ] then # maybe treat this case differently slope60=${results2[19]} else slope60=`echo " ${results2[19]} - ${results2[27+19]} " | bc -l` # ie /1h fi fi #echo "exc "$exc" excold "$excold #echo "slope "$slope" prev "$slopeprev" sig "$sig" prevsig "$sigprev" sig60 "$sig60" slope60 "$slope60 fi # missing: probably one should check also 20 min binning (s example 20170103) # missing: check on still available observation time echo " "$i" "${results[$num+2]}"-"${results[$num+3]}"["${results[$num+8]}"] "$exc" "$sig >> $logfile case $triggertype in 1) #echo "std trigger: criteria ( $exc >= $exclimit && $sig >= $siglimit && $higher )" trigger=`echo " $exc >= $exclimit && $sig >= $siglimit && $higher " | bc -l` limits=$exclimit"_"$siglimit ;; 2) #echo "magic 501: criteria ( $exc >= $exclimit && $sig >= $siglimit && $higher )" trigger=`echo " $exc >= $exclimit && $sig >= $siglimit && $higher " | bc -l` limits=$exclimit"_"$siglimit ;; 3) #echo "magic fast rise/decay" #trigger=`echo " $slope >= $slopelimit && $slopeprev >= $slopelimit && $slope60 >= $slopelimit && $sig >= $siglimit " | bc -l` trigger=`echo " ( ( $slope >= $slopelimit && $slopeprev >= $slopelimit && $slope60 >= $slopelimit ) || ( - $slope >= $slopelimit && - $slopeprev >= $slopelimit && - $slope60 >= $slopelimit ) ) && $sig >= $siglimit " | bc -l` limits=$slopelimit"_"$siglimit ;; 5) #echo "std trigger: criteria ( $exc >= $exclimit && $sig >= $siglimit && $higher )" trigger=`echo " $exc >= $exclimit && $sig >= $siglimit && $higher " | bc -l` limits=$exclimit"_"$siglimit ;; *) echo $triggertype" not yet implemented" >> $logfile ;; esac # missing: maybe use $donetriggerfile for all cases - update of nightly case can be triggered differently # missing: adapt triggerfilename for type 3 (slopelimit) if [ $trigger -eq 1 ] then # do whatever to be done to trigger # - send email/sms / call # - create amon file # - prepare email for alert # - entry in DB if [ "$bin" = "" ] then # nightly file: simply overwrite, but do not send trigger again # or check value if it's increasing? # missing: get information from previous trigger + interpret triggerfile=$flarealertspath"/"$night"-"$limits"-source"$sourcekey".trigger"$triggertype else # smaller binning: check if trigger is identical triggerfile=$flarealertspath"/"$night"_"`printf %03d $runid`"-"$limits"-source"$sourcekey".trigger"$triggertype fi donetriggerfile=$triggerfile".done" #ls $donetriggerfile # check if nightly flux increased if [ "$bin" = "" ] then trigger2=1 changedfiles=( `ls $donetriggerfile"-changed-"* 2>/dev/null` ) if [ ${#changedfiles[@]} -gt 0 ] then oldnightlyexc=`grep "corr. 
cu: [0-9].[0-9] " ${changedfiles[@]} | grep -o -E ' [0-9].[0-9] ' | sort | tail -1` trigger2=`echo " $exc > $oldnightlyexc " | bc -l` echo "trigger2: "$trigger2" (exc: "$exc", oldexc: "$oldnightlyexc")" echo "trigger2: "$trigger2" (exc: "$exc", oldexc: "$oldnightlyexc")" >> $logfile fi fi # write new file only if old files do not agree if [ -e $donetriggerfile ] then diff $donetriggerfile $triggerfile >/dev/null checkstatus=`echo $?` if [ $checkstatus -eq 0 ] then echo " alert already done "$donetriggerfile >> $logfile i=`echo $i +1 | bc -l` continue fi fi if [ -e $triggerfile ] then mv $triggerfile $donetriggerfile fi #echo $night"_"$runid" "$sourcekey" -> "$triggerfile echo " writing "$triggerfile >> $logfile touch $triggerfile echo "Trigger found: " > $triggerfile echo "-------------- " >> $triggerfile echo " type: "$triggertype >> $triggerfile echo " excess limit: "$exclimit" CU" >> $triggerfile echo " significance limit: "$siglimit" sigma" >> $triggerfile if [ "$bin" = "" ] then echo " nightly binning " >> $triggerfile else echo " binning: "$bin" min" >> $triggerfile fi echo "Summary of flare event: " >> $triggerfile echo "----------------------- " >> $triggerfile echo " source: "$sourcename >> $triggerfile echo " night: "${results[$num+1]} >> $triggerfile echo " runs: "${results[$num+2]}" - "${results[$num+3]} >> $triggerfile echo " start: "${results[$num+4]}" "${results[$num+5]}" UTC" >> $triggerfile echo " stop: "${results[$num+6]}" "${results[$num+7]}" UTC" >> $triggerfile if [ "$bin" = "" ] then echo " ontime: "${results[$num+8]}" h" >> $triggerfile else echo " ontime: "${results[$num+8]}" min" >> $triggerfile fi #echo " ontime: "`echo "scale=1; ${results[$num+8]} / 60. " | bc -l`" min" #scale doesn't round properly echo " signal: "${results[$num+9]}" evts" >> $triggerfile echo " background: "${results[$num+10]}" evts" >> $triggerfile echo " bgrate: "${results[$num+11]}" evts/h" >> $triggerfile echo " exc: "${results[$num+12]}" +- "${results[$num+13]}" evts" >> $triggerfile echo " excrate: "${results[$num+14]}" +- "${results[$num+15]}" evts/h" >> $triggerfile echo " corr. excrate: "${results[$num+16]}" - "${results[$num+17]}" evts/h" >> $triggerfile echo " significance: "${results[$num+18]}" sigma" >> $triggerfile echo " cu: "${results[$num+19]}" +- "${results[$num+20]}" CU" >> $triggerfile echo " corr. 
cu: "${results[$num+21]}" +- "${results[$num+22]}" CU" >> $triggerfile echo " zd: "${results[$num+23]}" - "${results[$num+24]}" degree" >> $triggerfile echo " th: "${results[$num+25]}" - "${results[$num+26]}" DAC counts" >> $triggerfile # additional information fast rise/decay trigger if [ $triggertype -eq 3 ] then echo "Flux doubling/halfing times: " >> $triggerfile echo "---------------------------- " >> $triggerfile echo " excess: "$exc >> $triggerfile echo " excess old: "$excold >> $triggerfile echo " significance: "$sig >> $triggerfile echo " significance old: "$sigprev >> $triggerfile echo " slope: "$excold >> $triggerfile echo " slope old: "$slopeold >> $triggerfile echo " excess 60 min: "${results2[19]} >> $triggerfile echo " excess 60 min old: "${results2[27+19]} >> $triggerfile echo " slope 60 min: "$slope60 >> $triggerfile echo " significance 60 min: "$sig60 >> $triggerfile fi if [ -e $donetriggerfile ] then diff $donetriggerfile $triggerfile >/dev/null checkstatus=`echo $?` if [ $checkstatus -gt 0 ] then # keep history of non-sent triggers donetriggerfile2=$donetriggerfile"-changed-"`date +%Y%m%d%H%M%S` cp $donetriggerfile $donetriggerfile2 fi fi # missing: get summary of whole observation # send email only of $donetriggerfile doesn't exists if ! [ -e $donetriggerfile ] then query="INSERT FlareAlerts.FlareTriggers SET fTriggerInserted=Now(), fNight="$night", fRunID="$runid", fTriggerType="$triggertype if [ "$bin" = "" ] then query=$query", fBinning=NULL" else query=$query", fBinning="$bin fi echo $query sendquery >> $logfile # AMON case: create VOEvent-File if [ $triggertype -eq 5 ] then voeventfile=$voeventpath"/"`basename $triggerfile`".xml" # missing: check for archive file # if exist -> do revision echo "creating "$voeventfile echo "Creating "$voeventfile >> $logfile #print_voevent_file print_voevent_file > $voeventfile # in amon-case no email needs to be sent else if [ "$bin" = "" ] && [ $trigger2 -eq 0 ] then continue fi echo "sending["$triggertype"] "$triggerfile echo "sending["$triggertype"] "$triggerfile >> $logfile cat $triggerfile | mail -s 'test flare alert ' -b $emailfrom -r $emailfrom $emailto #cat $triggerfile | mail -s "test flare alert for $sourcename " $emailto # that's also the cases for making a call # fill DB for shifthelper fi fi fi # counter i=`echo $i +1 | bc -l` done echo " found "$i" data point(s)." >> $logfile echo "" >> $logfile } get_average_flux() { # query average flux from DB query="SELECT ROUND(SUM(fNumExcEvts)/SUM("$ontime")*3600,1) as excrate, " query=$query"ROUND(ExcErr(SUM(fNumSigEvts), SUM(fNumBgEvts))/SUM("$ontime")*3600, 1) AS excerr, " query=$query"ROUND(SUM("$ontime")/3600.,1) AS ontime " query=$query"FROM AnalysisResultsRunLP " query=$query"LEFT JOIN RunInfo USING (fNight, fRunID) " if [ "$1" = "" ] then query=$query"WHERE fSourceKey="$sourcekey" AND NOT ISNULL(fNumExcEvts) " else query=$query"WHERE fSourceKey="$sourcekey" AND fNight BETWEEN "$1" AND "$2" AND NOT ISNULL(fNumExcEvts) " fi query=$query"GROUP BY fSourceKey " sendquery # missing - get in CU to correct for fluctuations of CU } # main part of the script # missing: check if twistd client is running # + check if there are remaining files in the to-send-folder of amon # -> send email if amon connection has problem for sourcekey in ${sourcekeys[@]} do query="SELECT fSourceName FROM Source WHERE fSourceKey="$sourcekey sourcename=`sendquery` #do not combine this with other source info as sourcename can have spaces # todo: what about data check ? # should avg include current night? 
for sourcekey in ${sourcekeys[@]}
do
   query="SELECT fSourceName FROM Source WHERE fSourceKey="$sourcekey
   sourcename=`sendquery`
   # do not combine this with other source info as sourcename can have spaces

   # todo: what about data check ?
   # should avg include current night?
   total=( `get_average_flux` )
   month=( `get_average_flux \`date -d $night' - 1 MONTH' +%Y%m%d\` $night` )
   year=( `get_average_flux \`date -d $night' - 1 YEAR' +%Y%m%d\` $night` )
   avgflux=${total[0]}
   avgfluxmonth=${month[0]}
   avgfluxyear=${year[0]}
   error=${total[1]}
   errormonth=${month[1]}
   erroryear=${year[1]}

   # getting some information on the source
   query="SELECT fDeclination, fRightAscension, fRightAscension/24.*15 FROM Source WHERE fSourceKey="$sourcekey
   sourceinfo=( `sendquery` )

   # ignore Crab
   if [ $sourcekey -eq 5 ]
   then
      continue
   fi

   printprocesslog "INFO Evaluation for $sourcename ... "
   echo "Evaluation for $sourcename ... " >> $logfile
   echo " average fluxes: "$avgfluxmonth"+-"$errormonth" evts/h (last month) "$avgfluxyear"+-"$erroryear" evts/h (last year) "$avgflux"+-"$error" evts/h (all)" >> $logfile

   # missing: get limits from DB (structure needs to be defined)

   # triggers in the frame of the MoU in the gamma-ray community
   triggertype=1
   # limits
   siglimit=3.0 # sigma
   #if [ $sourcekey -eq 1 ] || [ $sourcekey -eq 2 ]
   if [ $sourcekey -eq 1 ] || [ $sourcekey -eq 2 ] || [ $sourcekey -eq 5 ]
   then
      exclimit=3.0 # CU
   else
      exclimit=0.5 # CU
   fi
   # only if rate goes even higher, we have to react
   onlyifhigher="yes"
   higher=1
   printprocesslog "INFO checking for [General gamma-ray MoU]" >> $logfile
   echo "[General gamma-ray MoU]" >> $logfile
   echo " nightly binning..." >> $logfile
   # checking nightly binning
   bin=
   get_query_nightly_binning
   results=( `sendquery` )
   evaluate_result
   # 20 min binning
   bin=20
   echo " "$bin" min binning..." >> $logfile
   get_query_minute_binning $bin
   results=( `sendquery` )
   evaluate_result

   # triggers to MAGIC
   # Mrk 501 proposal
   triggertype=2
   if [ $sourcekey -eq 2 ]
   then
      siglimit=3.0
      exclimit=2.0 # cu
      printprocesslog "INFO checking for [Trigger to MAGIC 501 proposal]" >> $logfile
      echo "[Trigger to MAGIC 501 proposal]" >> $logfile
      # checking nightly binning
      bin=
      echo " nightly binning..." >> $logfile
      get_query_nightly_binning
      results=( `sendquery` )
      evaluate_result
      # 20 min binning
      bin=20
      echo " "$bin" min binning..." >> $logfile
      get_query_minute_binning $bin
      results=( `sendquery` )
      evaluate_result
   fi

   # Mother of ToO - fast rise/decay
   # sources: Mrk 421, Mrk 501, 2344, 1959
   triggertype=3
   #if [ $sourcekey -eq 1 ] || [ $sourcekey -eq 2 ] || [ $sourcekey -eq 3 ] || [ $sourcekey -eq 7 ] || [ $sourcekey -eq 5 ] # for testing
   if [ $sourcekey -eq 1 ] || [ $sourcekey -eq 2 ] || [ $sourcekey -eq 3 ] || [ $sourcekey -eq 7 ]
   then
      printprocesslog "INFO checking for [Trigger to MAGIC - fast rise/decay]" >> $logfile
      echo "[Trigger to MAGIC - fast rise/decay]" >> $logfile
      ## keep thresholds low (or do not use in evaluation)
      #siglimit=2.0
      #exclimit=0.5
      # limits in slope
      slopelimit=1.0 # 1 CU/h
      siglimit=3.0 # 1 sigma in 1 hour
      # binning
      bin=30
      echo " "$bin" min binning..." >> $logfile
      get_query_minute_binning $bin
      results=( `sendquery` )
      evaluate_result
   fi

   # X-ray ToO
   triggertype=4
   # to be added
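   # A possible starting point for the X-ray ToO case, following the note in the header
   # ("add note for values > 70 evts/h for X-ray triggers"). The limits below are
   # placeholders, and evaluate_result currently compares the excess in CU, so a
   # rate-based criterion would still have to be added there; therefore commented out:
   #siglimit=3.0
   #exclimit=70 # evts/h, not CU - needs a dedicated check in evaluate_result
   #bin=
   #echo " nightly binning..." >> $logfile
   #get_query_nightly_binning
   #results=( `sendquery` )
   #evaluate_result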
   # AMON - automatic triggers using VOEvent files
   triggertype=5
   printprocesslog "INFO checking for [Trigger to AMON]" >> $logfile
   echo "[Trigger to AMON]" >> $logfile
   echo " details still to be defined" >> $logfile
   # missing: trigger limits and binning still to be defined
   #          x times above average + significance limit ?
   #          sub-threshold?
   #          FP-rate to be calculated
   # use for the moment 0.5 CU and 3 sigma
   siglimit=3.0
   exclimit=0.5
   onlyifhigher="no"
   # checking nightly binning
   bin=
   echo " nightly binning..." >> $logfile
   get_query_nightly_binning
   results=( `sendquery` )
   evaluate_result
   bin=20
   echo " "$bin" min binning..." >> $logfile
   get_query_minute_binning $bin
   results=( `sendquery` )
   evaluate_result

   echo "" >> $logfile
   echo "" >> $logfile
done

finish

# for archival testing:
for (( i=0; i < 100 ; i++))
do
   date=`date --date="-${i}days" +%Y%m%d`
   /home/fact/SW.automatic.processing/DataCheck/QuickLook/FlareAlerts.sh $date
done