source: trunk/DataCheck/CheckAuxData.sh@12788

Last change on this file since 12788 was 12660, checked in by Daniela Dorner, 13 years ago
added notes
  • Property svn:executable set to *
File size: 3.9 KB
#!/bin/bash

#
# some notes:
#
# get threshold (assuming that the threshold is the same for all patches and the last value is the highest)
# fstatistic /loc_data/aux/2011/11/25/20111125_042.FTM_CONTROL_STATIC_DATA.fits[1] col=PatchThresh[1] rows=-
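# (fstatistic is a HEASoft ftool that prints statistics such as min, max and mean of the given column;
#  with the assumption above, the maximum then corresponds to the threshold)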

today=`date +%F`
logfile=/home/`whoami`/DataCheck/log/CheckRaw$today.log
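# note: the log file name still says CheckRaw, presumably inherited from the raw-data check script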

# setup to use ftools
export HEADAS=/opt/heasoft-6.11/x86_64-unknown-linux-gnu-libc2.13-0/
source $HEADAS/headas-init.sh
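# (the ftools environment is needed for fverify below and for the fstatistic command from the notes above)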

# get the current night and the last 3 nights
dates=( `date +%Y/%m/%d` `date +%Y/%m/%d --date="-1day"` `date +%Y/%m/%d --date="-2day"` `date +%Y/%m/%d --date="-3day"` )
dates=( `date +%Y/%m/%d` )
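# note: the second assignment overrides the first, so only the current night is processed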

# process the aux files of these dates
for date in ${dates[@]}
do
    echo "" >> $logfile 2>&1
    echo "" >> $logfile 2>&1
    echo "" >> $logfile 2>&1
    rawdir=/loc_data/aux/$date
    runnumber=`echo $date | sed -e 's/\///g'`
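    # (runnumber is the date without slashes, i.e. YYYYMMDD)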
    echo `date`": processing files in "$rawdir >> $logfile
    # check if data are available from that night
    if ! [ -d $rawdir ]
    then
        echo `date`": no data available in "$rawdir >> $logfile
        continue
    fi

    # find all fits files, starting with the oldest one
    echo `date`": finding files to be checked in $rawdir..." >> $logfile
    fitsfiles=`find $rawdir -type f -name '*fits' | sort`

    # loop to check the files
    echo `date`": checking files in $rawdir..." >> $logfile
    for file in $fitsfiles
    do
        # check if the file was accessed in the last 5 minutes
        isnew=`find $file -amin -5`
        if [ "$isnew" != "" ]
        then
            echo $file" is not older than 5 min => continue" >> $logfile
            continue
        fi

        # check if the file is already finished
        # original file on daq (if data was taken on daq)
        origfile=`echo $file | sed -e 's/loc_data/daq/'`
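        # (origfile points to the same file under /daq instead of /loc_data)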
        if [ -e $origfile ]
        then
            # get time of last modification as seconds since Epoch for both files
            timeorig=`stat -c %Y $origfile`
            timecopy=`stat -c %Y $file`
            # compare times
            if ! [ $timeorig -eq $timecopy ]
            then
                # if the times are not the same, the file is still open => skip it
                echo `date`": file "$file" not yet closed => continue" >> $logfile
                continue
            fi
        else
            # if the origfile doesn't exist, the data was probably written on data rather than on daq
            echo `date`": file "$file" was probably taken on data and not daq" >> $logfile
        fi

        # md5sum -> db
        # runtype
        # position/source name
        # # evts
        # trigger setup
        # roi
        # check run#

        # both (aux and raw files)
        md5sum=`md5sum $file | cut -d' ' -f1`
        numberfromname=`echo $file | grep -E -o '20[1-9][0-9][01][0-9][0-3][0-9]_[0-9]{3}'`
        runnumberfromname=`echo $file | grep -E -o '20[1-9][0-9][01][0-9][0-3][0-9]'` # + compare to $runnumber (=$date)
        # aux file
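        # (extracts the service name from the aux file name, e.g. FTM_CONTROL_STATIC_DATA)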
        tablename=`echo $file | grep -E -o '[.][A-Z_]+[.]' | sed -e 's/[.]//g'`
        # raw file
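        # (fitsdump from FACT++ is used to read the NIGHT and RUNID keywords from the header of the Events table)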
        runnumberfromfile=`/home/fact/FACT++/fitsdump -h -t Events $file 2>/dev/null | grep NIGHT | grep -E -o '20[1-9][0-9][01][0-9][0-3][0-9]'`
        filenumberfromfile=`/home/fact/FACT++/fitsdump -h -t Events $file 2>/dev/null | grep RUNID | grep -E -o '[0-9]{1,3}'`
        numberfromfile=$runnumberfromfile"_"$filenumberfromfile
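        # cross-check: the run number derived from the file name should match the one from the header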
        if [ "$numberfromfile" == "$numberfromname" ]
        then
            echo "numbers are identical"
        fi
        runtype=`/home/fact/FACT++/fitsdump -h -t Events $file 2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
        roi=`/home/fact/FACT++/fitsdump -h -t Events $file 2>/dev/null | grep NROI | grep -v NROITM | grep -E -o '[0-9]{1,4}'`
        numevents=`/home/fact/FACT++/fitsdump -h -t Events $file 2>/dev/null | grep Events | grep -E -o '[0-9]+'`

        # both (aux and raw files)
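        # fverify is a HEASoft ftool that checks the FITS file structure; an empty result means the
        # '0 error(s)' line was not found, i.e. the file is probably corrupt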
        checkfitsfile=`fverify $file | grep '0 error(s)'`
        if [ "$checkfitsfile" == "" ]
        then
            echo " "
            echo $file
            echo "file "$file" probably has an error"
        fi
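        # note: this exit stops the script after the first file, presumably left in for testing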
        exit
    done
done