#!/bin/bash
#
source `dirname $0`/../Sourcefile.sh

printprocesslog "INFO starting $0"

# get date (before 18h there is no new data to be processed)
datepath=`date --date="-19HOUR" +%Y/%m/%d`
date=`date --date="-19HOUR" +%Y%m%d`
# for processing by hand
#datepath="2013/08/11"
#date="20130811"

printprocesslog "INFO processing "$datepath

night=`echo $datepath | sed -e 's/\///g'`

auxpathnewdaq=/newdaq/aux/$datepath
# create aux directory on daq, if not yet there
auxpath=/loc_data/aux/$datepath
makedir $auxpath
# create path for info files needed for analysis
infopath=$anapath/info/$datepath
makedir $infopath
echo "" > $infopath/runrow.txt
# create path for sequence files
seqpath=$anapath/sequences/$datepath
makedir $seqpath

rawpathnewdaq=/newdaq/raw/$datepath
rawpath=/loc_data/raw/$datepath

# needed auxiliary files:
# drive file with information about current source position
drivefile=$auxpath/${night}.DRIVE_CONTROL_SOURCE_POSITION.fits
drivefilenewdaq=$auxpathnewdaq/${night}.DRIVE_CONTROL_SOURCE_POSITION.fits
# drive file with information about tracking position
drivefile2=$auxpath/${night}.DRIVE_CONTROL_TRACKING_POSITION.fits
drivefilenewdaq2=$auxpathnewdaq/${night}.DRIVE_CONTROL_TRACKING_POSITION.fits
# file with magic weather information
mweatherfile=$auxpath/${night}.MAGIC_WEATHER_DATA.fits
mweatherfilenewdaq=$auxpathnewdaq/${night}.MAGIC_WEATHER_DATA.fits
# file with trigger rates
ratesfile=$auxpath/${night}.FTM_CONTROL_TRIGGER_RATES.fits
ratesfilenewdaq=$auxpathnewdaq/${night}.FTM_CONTROL_TRIGGER_RATES.fits
# file with FSC temperature data
tempfile=$auxpath/${night}.FSC_CONTROL_TEMPERATURE.fits
tempfilenewdaq=$auxpathnewdaq/${night}.FSC_CONTROL_TEMPERATURE.fits
# file with FSC humidity data
humfile=$auxpath/${night}.FSC_CONTROL_HUMIDITY.fits
humfilenewdaq=$auxpathnewdaq/${night}.FSC_CONTROL_HUMIDITY.fits

function rsync_aux_file()
{
    if ls $1 >/dev/null 2>&1
    then
        printprocesslog "INFO rsync "$1
        # rsync
        # from newdaq (/newdaq = /fact on newdaq), rsync server newdaq::newdaq/
        # to daq (/daq = /loc_data on daq)
        rsyncservernewdaq=`echo $1 | sed -e 's/^\//172.16.100.100::/'`
        # old
        #if ! rsync -a -T $rsynctempdir $1 $2
        # new (workaround for problems on daq)
        if ! rsync -a -T $rsynctempdir $rsyncservernewdaq $2
        then
            printprocesslog "WARN rsync of "$1" failed."
        fi
    else
        printprocesslog "WARN "$1" missing."
    fi
}
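
# Usage note (dates in the example path are illustrative): rsync_aux_file copies an
# auxiliary file from newdaq to daq; it rewrites the absolute path into the
# rsync-module form first, e.g.
#   /newdaq/aux/2013/08/11/20130811.MAGIC_WEATHER_DATA.fits
#   -> 172.16.100.100::newdaq/aux/2013/08/11/20130811.MAGIC_WEATHER_DATA.fits
# and is called as: rsync_aux_file <file on newdaq> <file on daq>, e.g.
#   rsync_aux_file $mweatherfilenewdaq $mweatherfile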
function check_daq()
{
    diskusage=( `df -P /raid10 | grep raid10` )
    # check if more than 700 GB are left on /loc_data
    if [ ${diskusage[3]} -lt $disklimitdaq ]
    then
        echo "WARN less than 700 GB left on /raid10 on node "$HOSTNAME
        printprocesslog "WARN less than 700 GB left on /raid10 on node "$HOSTNAME
        df -h /raid10
        finish
    fi
}
check_daq

printprocesslog "INFO get lists of raw files on newdaq and daq"
files=( `find $rawpathnewdaq -type f 2>/dev/null | sort` )
if [ ${#files[@]} -eq 0 ]
then
    printprocesslog "INFO no raw files available yet for "$datepath
    finish
fi
fileslocal=( `find $rawpath -type f | sort` )
callistofiles=( `find $anapath/callisto -type f -name $date*-calibration.log | sort` )
numdataruns=0

# create raw directory on daq, if not yet there
makedir $rawpath

#echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
printprocesslog "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns

while [ ${#fileslocal[@]} -ne ${#files[@]} ] || [ $numdataruns -ne ${#callistofiles[@]} ]
do
    # only continue with the script
    # when there is more than 10% space left on daq
    source `dirname $0`/../Sourcefile.sh
    check_daq

    numdataruns=0
    #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
    printprocesslog "INFO status beginning of while-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns

    rsync_aux_file $drivefilenewdaq $drivefile

    # files on newdaq
    for file in ${files[@]}
    do
        printprocesslog "processing "$file
        localfile=`echo $file | sed -e 's/newdaq/loc_data/'`
        source `dirname $0`/../Sourcefile.sh
        # check if file is already transferred
        if ! ls $localfile >/dev/null 2>&1
        then
            # check if it is a drs-file
            # get stop time from raw-file
            if [ "`echo $file | grep -o drs`" == "drs" ]
            then
                nondrsfile=`echo $file | sed -e 's/[.]drs//g'`
                tstop=`$factpath/fitsdump -h $nondrsfile 2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
            else
                tstop=`$factpath/fitsdump -h $file 2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
            fi
            # when stop time is 0, the file is not yet closed
            if [ "$tstop" == "0" ]
            then
                printprocesslog "WARN "$file" not yet closed."
                # if a file is not closed and not touched for 30 minutes,
                # it is assumed corrupted and still transferred
                fileaccessed=`find $file -amin -30`
                if ! [ "$fileaccessed" == "" ]
                then
                    printprocesslog "INFO "$file" was accessed in the last 30 minutes => continue"
                    continue
                else
                    printprocesslog "WARN "$file" has empty TSTOP but was not touched for 30 minutes"
                    fileerror="yes"
                fi
            fi
            # rsync
            # from newdaq (/newdaq = /fact on newdaq), rsync server newdaq::newdaq/
            # to daq (/daq = /loc_data on daq)
            # to access the rsync server via the dedicated network between
            # daq and newdaq, use 172.16.100.100::newdaq
            filersyncserver=`echo $file | sed -e 's/^\//172.16.100.100::/'`
            # old
            ##if ! rsync -av --stats --progress --bwlimit=$bwlimit $file $localfile
            #if ! rsync -a -T $rsynctempdir --bwlimit=$bwlimit $file $localfile
            # new
            if ! rsync -a -W -T $rsynctempdir --bwlimit=$bwlimit $filersyncserver $localfile
            then
                printprocesslog "ERROR something went wrong with rsync of "$file
                rm $localfile
                continue
            fi
            printprocesslog "INFO "$file" rsynced successfully."
        fi
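        # from here on the file exists locally: for non-drs files the run type,
        # ROI and number of events are read from the FITS header with fitsdump,
        # the run number is parsed from the file path, and the run is sorted into
        # the sequence bookkeeping below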
        if [ "`echo $localfile | grep -o drs`" != "drs" ]
        then
            runtype=`$factpath/fitsdump -h $localfile 2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z0-9._-]+[']" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
            runnum=`echo $localfile | cut -d_ -f3 | cut -d. -f1`
            roi=`$factpath/fitsdump -h $localfile 2>/dev/null | grep ROI | grep -v ROITM | grep -E -o "[0-9][0-9][0-9][0-9]?" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
            numevts=`$factpath/fitsdump -h $file 2>/dev/null | grep Events | grep -E -o '[0-9]+'`
            printprocesslog "DEBUG runnum "$runnum" runtype "$runtype" roi "$roi" numevts "$numevts

            if [ "$runtype" == "drs-time-upshifted" ]
            then
                printprocesslog "INFO file "$file" has runtype drs-time-upshifted -> continue"
                continue
            fi

            #echo $runtype" "$runnum
            if [ "$runtype" == "data" ]
            then
                if [ "$fileerror" = "yes" ]
                then
                    printprocesslog "INFO do not further process corrupted file "$localfile
                    fileerror=
                    continue
                fi
                seqfile=$seqpath/${night}_${runnum}.seq
                printprocesslog "INFO write data-seq "$seqfile
                echo "# written by automatic analysis in LP" > $seqfile
                echo "" >> $seqfile
                echo "Sequence: "`echo $night | cut -c3-8`$runnum >> $seqfile
                echo "Night: "`echo $datepath | sed -e 's/\//-/g'` >> $seqfile
                echo "" >> $seqfile
                echo "DrsSequence: "$drsseq >> $seqfile
                echo "" >> $seqfile
                #echo $runrow" CalRuns"
                #echo $runrow | grep -E -o '[0-9]{3}light-pulser-ext300' | sed -e 's/light-pulser-ext300//g'
                echo "CalRuns: "`echo $runrow | grep -E -o '[0-9]{3}light-pulser-ext300' | sed -e 's/light-pulser-ext300//g'` >> $seqfile
                echo "PedRuns: "`echo $runrow | grep -E -o '[0-9]{3}pedestal300' | sed -e 's/pedestal300//g'` >> $seqfile
                echo "DatRuns: "$runnum >> $seqfile
                echo "" >> $seqfile
                echo "DrsFiles: "$drsfile >> $seqfile
                echo "" >> $seqfile
                echo "#DrsFile: "$drsfile >> $seqfile
                echo "" >> $seqfile

                # tstopi=`$factpath/fitsdump -h $localfile 2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
                # tstopf=`$factpath/fitsdump -h $localfile 2>/dev/null | grep TSTOPF | grep -E -o '[.][0-9]+'`
                # tstop=${tstopi}${tstopf}
                # coordinates=( `${factpath}/fitsdump ${drivefile} -c Ra_src Dec_src -r --filter='Time<'${tstop} 2>/dev/null | tail -1 2>&1` )
                # if [ "${coordinates[0]}" == "" ] || [ "${coordinates[1]}" == "" ]
                # then
                #     printprocesslog "WARN couldn't get coordinates ("${coordinates[@]}") from "$drivefile
                #     #echo "WARN couldn't get coordinates ("${coordinates[@]}") from "$drivefile
                #     continue
                # fi
                # if [ "${coordinates[0]}" == "0" ] || [ "${coordinates[1]}" == "0" ]
                # then
                #     printprocesslog "WARN coordinates "${coordinates[@]}
                #     #echo "WARN coordinates "${coordinates[@]}
                #     continue
                # fi
                # printprocesslog "DEBUG coordinates "${coordinates[@]}
                # query="SELECT fSourceKEY FROM scheduling.source WHERE "
                # query=$query" fRightAscension BETWEEN "${coordinates[0]}"-0.01 AND "${coordinates[0]}"+0.01 "
                # query=$query" AND fDeclination BETWEEN "${coordinates[1]}"-0.01 AND "${coordinates[1]}"+0.01 "
                # sourcekey=`sendquery`
                # if [ "$sourcekey" == "" ]
                # then
                #     printprocesslog "WARN sourcekey empty - coordinates"${coordinates[@]}
                # fi

                printprocesslog "INFO counting callisto logs and data files +1."
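                # for each data run: count it towards numdataruns, check the
                # callisto throttle and, if no calibration log exists yet, sync
                # the needed aux files and start RunCallisto.sh for the sequence
                # file in the background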
                # get number of running callistos
                callistocount=`ps aux | grep RunCallisto | grep -E -o '20[12][0-9][01][0-9][0-3][0-9]_[0-9][0-9][0-9]' | sort | uniq | wc -l`
                # count data runs
                numdataruns=`echo " $numdataruns + 1 " | bc -l`
                #echo "numdata +1"
                #echo "cal: "$callistocount" numdat: "$numdataruns" numcallog: "${#callistofiles[@]}
                printprocesslog "INFO running callistos: "$callistocount" #data-runs: "$numdataruns" #callisto-logs: "${#callistofiles[@]}

                # do not overload the system in case of a lot of files to be processed
                # the limit numcallistos is set in setup.fact.lp.data
                if [ $callistocount -ge $numcallistos ]
                then
                    printprocesslog "INFO "$callistocount" RunCallisto.sh are running -> continue"
                    #echo "INFO "$callistocount" RunCallisto.sh are running -> continue"
                    continue
                fi

                callistolog=`dirname $seqfile | sed -e "s/sequences/callisto/"`"/"$night"_"$runnum"-calibration.log"
                if ! [ -e $callistolog ]
                then
                    rsync_aux_file $drivefilenewdaq2 $drivefile2
                    rsync_aux_file $mweatherfilenewdaq $mweatherfile
                    rsync_aux_file $ratesfilenewdaq $ratesfile
                    rsync_aux_file $tempfilenewdaq $tempfile
                    rsync_aux_file $humfilenewdaq $humfile
                    #printprocesslog "INFO starting RunCallisto.sh for "$sourcekey" "$seqfile
                    #echo "INFO starting RunCallisto.sh for "$sourcekey" "$seqfile
                    #`dirname $0`/RunCallisto.sh $sourcekey $seqfile &
                    printprocesslog "INFO starting RunCallisto.sh for "$seqfile
                    #echo "INFO starting RunCallisto.sh for "$seqfile
                    `dirname $0`/RunCallisto.sh $seqfile &
                fi
                continue
            else
                # skip a non-data run when it does not have 1000 events,
                # as this probably means an fad-loss
                if [ $numevts -ne 1000 ]
                then
                    printprocesslog "INFO file "$file" is a non-data file ("$runtype") and has not 1000 events ("$numevts")"
                    continue
                fi
            fi

            # runrow collects one "<runnum><runtype><roi>_" entry per non-data run
            printprocesslog "DEBUG runrow "$runrow" (from variable)"
            runrow=`cat $infopath/runrow.txt`
            printprocesslog "DEBUG runrow "$runrow" (from file)"
            runrow=$runrow$runnum$runtype$roi"_"
            echo $runrow > $infopath/runrow.txt

            # a complete drs-calibration sequence (drs-pedestal, drs-gain, 2x drs-pedestal,
            # drs-time, 2x pedestal) triggers writing of a drs sequence file
            if echo $runrow | grep -E '[0-9]{3}drs-pedestal1024_[0-9]{3}drs-gain1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-time1024_[0-9]{3}pedestal300_[0-9]{3}pedestal300_' >/dev/null
            then
                runrow2=`echo $runrow | grep -E -o '[0-9]{3}drs-pedestal1024_[0-9]{3}drs-gain1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-time1024_[0-9]{3}pedestal300_[0-9]{3}pedestal300_'`
                run1=`echo $runrow2 | cut -d_ -f1 | sed -e 's/drs-pedestal1024//g'`
                run2=`echo $runrow2 | cut -d_ -f2 | sed -e 's/drs-gain1024//g'`
                run3=`echo $runrow2 | cut -d_ -f3 | sed -e 's/drs-pedestal1024//g'`
                run4=`echo $runrow2 | cut -d_ -f4 | sed -e 's/drs-pedestal1024//g'`
                run5=`echo $runrow2 | cut -d_ -f5 | sed -e 's/drs-time1024//g'`
                run6=`echo $runrow2 | cut -d_ -f6 | sed -e 's/pedestal300//g'`
                run7=`echo $runrow2 | cut -d_ -f7 | sed -e 's/pedestal300//g'`
                seqfile=$seqpath/${night}_${run1}.drs.seq
                printprocesslog "INFO write drs-seq "$seqfile
                echo "# written by automatic analysis in LP" > $seqfile
                echo "" >> $seqfile
                echo "Sequence: "`echo $night | cut -c3-8`$run1 >> $seqfile
                echo "Night: "`echo $datepath | sed -e 's/\//-/g'` >> $seqfile
                echo "" >> $seqfile
                echo "CalRuns: "$run2 >> $seqfile
                echo "PedRuns: "$run6" "$run7 >> $seqfile
                echo "DatRuns: "$run5 >> $seqfile
                echo "DrsRuns: "$run1" "$run3" "$run4 >> $seqfile
                echo "DrsFiles: "$run3" "$run6 >> $seqfile
                echo "" >> $seqfile
                echo "#DrsFile: "$run6 >> $seqfile
                echo "" >> $seqfile
                echo "" > $infopath/runrow.txt
                drsseq=$run1
                drsfile=$run6
            fi
            # a pedestal run followed by a light-pulser run resets the runrow bookkeeping
            if echo $runrow | grep -E '[0-9]{3}pedestal300_[0-9]{3}light-pulser-ext300_' >/dev/null
            then
                echo "" > $infopath/runrow.txt
            fi
        fi
    done

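    # all files currently listed for this night have been handled; refresh the
    # file and log lists and re-check the while condition (all files transferred
    # and all data runs calibrated)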
    printprocesslog "INFO status after loop: "$callistocount" callistos running, "$numdataruns" data runs to process in total, "${#callistofiles[@]}" already have a callisto-logfile"

    # get new file lists
    printprocesslog "INFO get new file lists for "$datepath
    files=( `find $rawpathnewdaq -type f | sort` )
    fileslocal=( `find $rawpath -type f | sort` )
    callistofiles=( `find $anapath/callisto -type f -name $date*-calibration.log | sort` )
    #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
    printprocesslog "INFO status after for-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns

    # wait and get new file lists
    update=
    if [ ${#fileslocal[@]} -eq ${#files[@]} ]
    then
        printprocesslog "INFO wait 60 seconds."
        sleep 60
        #echo "sleep 60..."
        printprocesslog "INFO get new file lists for "$datepath
        files=( `find $rawpathnewdaq -type f | sort` )
        fileslocal=( `find $rawpath -type f | sort` )
        callistofiles=( `find $anapath/callisto -type f -name $date*-calibration.log | sort` )
    fi
    #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
    printprocesslog "INFO status after wait end of while-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
done
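
# For reference, a data sequence file written above has the following layout
# (run and sequence numbers are illustrative; DrsSequence and DrsFiles refer to
# the most recent drs sequence of the night):
#
#   # written by automatic analysis in LP
#
#   Sequence: 130811012
#   Night: 2013-08-11
#
#   DrsSequence: 001
#
#   CalRuns: 011
#   PedRuns: 010
#   DatRuns: 012
#
#   DrsFiles: 006
#
#   #DrsFile: 006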