#!/bin/bash
#
source `dirname $0`/../Sourcefile.sh
printprocesslog "INFO starting $0"
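# note: Sourcefile.sh is assumed to provide the helpers used below (printprocesslog,
# makedir, finish, sendquery) as well as setup variables like $anapath, $factpath,
# $rsynctempdir, $bwlimit, $disklimitdaq and $numruncallistos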

# get date (before 18h there is no new data to be processed)
if [ "$certaindate" != "" ]
then
   checkstring=`echo $certaindate | grep -E -o '20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]'`
   if [ "$checkstring" = "" ]
   then
      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
      finish
   fi
   datepath=$certaindate
else
   datepath=`date --date="-19HOUR" +%Y/%m/%d`
fi
date=`echo $datepath | sed -e 's/\///g'`
printprocesslog "INFO processing "$datepath

auxpathnewdaq=/newdaq/aux/$datepath
# create aux directory on daq, if not yet there
auxpath=/loc_data/aux/$datepath
makedir $auxpath
# create path for info files needed for analysis
infopath=$anapath/info/$datepath
makedir $infopath
echo "" > $infopath/runrow.txt
# create path for sequence files
seqpath=$anapath/sequences/$datepath
makedir $seqpath
rawpathnewdaq=/newdaq/raw/$datepath
rawpath=/loc_data/raw/$datepath

# needed auxiliary files:
# drive file with information about the current source position
drivefile=$auxpath/${date}.DRIVE_CONTROL_SOURCE_POSITION.fits
drivefilenewdaq=$auxpathnewdaq/${date}.DRIVE_CONTROL_SOURCE_POSITION.fits
# drive file with information about the tracking position
drivefile2=$auxpath/${date}.DRIVE_CONTROL_TRACKING_POSITION.fits
drivefilenewdaq2=$auxpathnewdaq/${date}.DRIVE_CONTROL_TRACKING_POSITION.fits
# file with MAGIC weather information
mweatherfile=$auxpath/${date}.MAGIC_WEATHER_DATA.fits
mweatherfilenewdaq=$auxpathnewdaq/${date}.MAGIC_WEATHER_DATA.fits
# file with trigger rates
ratesfile=$auxpath/${date}.FTM_CONTROL_TRIGGER_RATES.fits
ratesfilenewdaq=$auxpathnewdaq/${date}.FTM_CONTROL_TRIGGER_RATES.fits
# file with temperature information
tempfile=$auxpath/${date}.FSC_CONTROL_TEMPERATURE.fits
tempfilenewdaq=$auxpathnewdaq/${date}.FSC_CONTROL_TEMPERATURE.fits
# file with humidity information
humfile=$auxpath/${date}.FSC_CONTROL_HUMIDITY.fits
humfilenewdaq=$auxpathnewdaq/${date}.FSC_CONTROL_HUMIDITY.fits

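# copy one auxiliary file from newdaq to daq via the rsync server
# ($1: source file on newdaq, $2: target file on daq)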
function rsync_aux_file()
{
   if ls $1 >/dev/null 2>&1
   then
      printprocesslog "INFO rsync "$1
      # rsync
      # from newdaq (/newdaq = /fact on newdaq), rsync server newdaq::newdaq/
      # to daq (/daq = /loc_data on daq)
      rsyncservernewdaq=`echo $1 | sed -e 's/^\//172.16.100.100::/'`
      # old
      #if ! rsync -a -T $rsynctempdir $1 $2
      # new (workaround for problems on daq)
      if ! rsync -a -T $rsynctempdir $rsyncservernewdaq $2
      then
         printprocesslog "WARN rsync of "$1" failed."
      fi
   else
      printprocesslog "WARN "$1" missing."
   fi
}

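# stop the script (via finish) when the available space on /raid10
# (4th column of 'df -P') drops below $disklimitdaq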
function check_daq()
{
   diskusage=( `df -P /raid10 | grep raid10 ` )
   # check if more than 700 GB are left on /loc_data
   if [ ${diskusage[3]} -lt $disklimitdaq ]
   then
      #echo "WARN less than 700 GB left on /raid10 on node "$HOSTNAME" ("${diskusage[3]}")"
      printprocesslog "DISK less than 700 GB left on /raid10 on node "$HOSTNAME" ("${diskusage[3]}")"
      #df -h /raid10
      finish
   fi
}

check_daq

printprocesslog "INFO get lists of raw files on newdaq and daq"
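# the regex matches plain .fits files as well as compressed .fits.gz / .fits.fz files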
files=( `find $rawpathnewdaq -type f -regex '.*[.]fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort` )
# to treat links use:
#files=( `find -L $rawpathnewdaq -regex '.*[.]fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort` )

if [ ${#files[@]} -eq 0 ]
then
   printprocesslog "INFO no raw files available yet for "$datepath
   finish
fi
fileslocal=( `find $rawpath -type f -regex '.*[.]fits[.]?[g]?[f]?[z]?' | sort` )
callistofiles=( `find $anapath/callisto -type f -name $date*-calibration.log | sort` )
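# note: sendquery is assumed to run the statement in $query on the FACT database
# and to return the result (helper presumably provided by Sourcefile.sh)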
# get number of dataruns from DB
query="SELECT Count(*) FROM RunInfo WHERE fNight="$date" AND fRunTypeKey=1"
numdataruns=`sendquery`

# create raw directory on daq, if not yet there
makedir $rawpath

#echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
printprocesslog "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns

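# keep looping until all raw files from newdaq have been copied to daq
# and every data run has a callisto calibration log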
while [ ${#fileslocal[@]} -ne ${#files[@]} ] || [ $numdataruns -ne ${#callistofiles[@]} ] # || [ $numdataruns -ne 0 ] # FIXME: rethink and rework the logic
do
   # only continue with the script
   # when there is more than 10% space left on daq
   source `dirname $0`/../Sourcefile.sh
   check_daq

   numdataruns=0
   #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
   printprocesslog "INFO status beginning of while-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns

   rsync_aux_file $drivefilenewdaq $drivefile

   # files on newdaq
   for file in ${files[@]}
   do
      printprocesslog "processing "$file
      #echo "processing "$file
      localfile=`echo $file | sed -e 's/newdaq/loc_data/'`

      source `dirname $0`/../Sourcefile.sh
      # check if the file has already been transferred
      if ! ls $localfile >/dev/null 2>&1
      then
         # check if it is a drs-file
         # get the stop time from the raw file
         if [ "`echo $file | grep -o drs`" == "drs" ]
         then
            nondrs=`basename $file | sed -e 's/[.]drs//g'`
            nondrsfile=`find $rawpath -name $nondrs.*z`
            #echo "nondrs: "$nondrsfile
            tstop=`$factpath/fitsdump -h $nondrsfile 2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
         else
            tstop=`$factpath/fitsdump -h $file 2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
         fi
         #echo "tstop -"$tstop"-"
         # when the stop time is 0, the file is not closed
         # when an error is returned, tstop is empty
         if [ "$tstop" == "0" ] || [ "$tstop" == "" ]
         then
            printprocesslog "WARN "$file" not yet closed."
            # if a file is not closed and not touched for 30 minutes,
            # it is assumed to be corrupted and is transferred anyway
            fileaccessed=`find $file -amin -30`
            if ! [ "$fileaccessed" == "" ]
            then
               printprocesslog "INFO "$file" was accessed in the last 30 minutes => continue"
               continue
            else
               printprocesslog "WARN "$file" has empty TSTOP but was not touched for 30 minutes"
               fileerror="yes"
            fi
         fi

         # rsync
         # from newdaq (/newdaq = /fact on newdaq), rsync server newdaq::newdaq/
         # to daq (/daq = /loc_data on daq)
         # to access the rsync server via the dedicated network between
         # daq and newdaq, use 172.16.100.100::newdaq
         filersyncserver=`echo $file | sed -e 's/^\//172.16.100.100::/'`
         # old
         ##if ! rsync -av --stats --progress --bwlimit=$bwlimit $file $localfile
         #if ! rsync -a -T $rsynctempdir --bwlimit=$bwlimit $file $localfile
         # new
         if ! rsync -a -W -T $rsynctempdir --bwlimit=$bwlimit $filersyncserver $localfile
         then
            printprocesslog "ERROR something went wrong with rsync of "$file
            rm $localfile
            continue
         fi
         printprocesslog "INFO "$file" rsynced successfully."
      fi

      if [ "`echo $localfile | grep -o drs`" != "drs" ]
      then
         runtype=`$factpath/fitsdump -h $localfile 2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z0-9._-]+[']" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
         runnum=`echo $localfile | cut -d_ -f3 | cut -d. -f1`
         roi=`$factpath/fitsdump -h $localfile 2>/dev/null | grep ROI | grep -v ROITM | grep -E -o "[0-9][0-9][0-9][0-9]?" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
         numevts=`$factpath/fitsdump -h $file 2>/dev/null | grep Events | grep -E -o '[0-9]+'`
         printprocesslog "DEBUG runnum "$runnum" runtype "$runtype" roi "$roi" numevts "$numevts
         if [ "$runtype" == "drs-time-upshifted" ]
         then
            printprocesslog "INFO file "$file" has runtype drs-time-upshifted -> continue "
            continue
         fi
         #echo $runtype" "$runnum
         if [ "$runtype" == "data" ]
         then
            if [ "$fileerror" = "yes" ]
            then
               printprocesslog "INFO do not further process corrupted file "$localfile
               fileerror=
               continue
            fi
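            # write a sequence file for this data run: it combines the data run with
            # the pedestal and light-pulser runs collected in $runrow and with the last
            # DRS sequence ($drsseq / $drsfile); the file is later processed by RunCallisto.sh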
            seqfile=$seqpath/${date}_${runnum}.seq
            printprocesslog "INFO write data-seq "$seqfile
            echo "# written by automatic analysis in LP" >$seqfile
            echo "" >> $seqfile
            echo "Sequence: "`echo $date | cut -c3-8`$runnum >> $seqfile
            echo "Night: "`echo $datepath | sed -e 's/\//-/g'` >> $seqfile
            echo "" >> $seqfile
            echo "DrsSequence: "$drsseq >> $seqfile
            echo "" >> $seqfile
            # echo $runrow" CalRuns"
            # echo $runrow | grep -E -o '[0-9]{3}light-pulser-ext300' | sed -e 's/light-pulser-ext300//g'
            echo "CalRuns: "`echo $runrow | grep -E -o '[0-9]{3}light-pulser-ext300' | sed -e 's/light-pulser-ext300//g'` >> $seqfile
            echo "PedRuns: "`echo $runrow | grep -E -o '[0-9]{3}pedestal300' | sed -e 's/pedestal300//g'` >> $seqfile
            echo "DatRuns: "$runnum >> $seqfile
            echo "" >> $seqfile
            echo "DrsFiles: "$drsfile >> $seqfile
            echo "" >> $seqfile
            echo "#DrsFile: "$drsfile >> $seqfile
            echo "" >> $seqfile

            # tstopi=`$factpath/fitsdump -h $localfile 2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
            # tstopf=`$factpath/fitsdump -h $localfile 2>/dev/null | grep TSTOPF | grep -E -o '[.][0-9]+'`
            # tstop=${tstopi}${tstopf}
            # coordinates=( `${factpath}/fitsdump ${drivefile} -c Ra_src Dec_src -r --filter='Time<'${tstop} 2>/dev/null | tail -1 2>&1` )
            # if [ "${coordinates[0]}" == "" ] || [ "${coordinates[1]}" == "" ]
            # then
            # printprocesslog "WARN couldn't get coordinates ("${coordinates[@]}") from "$drivefile
            # #echo "WARN couldn't get coordinates ("${coordinates[@]}") from "$drivefile
            # continue
            # fi
            # if [ "${coordinates[0]}" == "0" ] || [ "${coordinates[1]}" == "0" ]
            # then
            # printprocesslog "WARN coordinates "${coordinates[@]}
            # #echo "WARN coordinates "${coordinates[@]}
            # continue
            # fi
            # printprocesslog "DEBUG coordinates "${coordinates[@]}
            # query="SELECT fSourceKEY FROM scheduling.source WHERE "
            # query=$query" fRightAscension BETWEEN "${coordinates[0]}"-0.01 AND "${coordinates[0]}"+0.01 "
            # query=$query" AND fDeclination BETWEEN "${coordinates[1]}"-0.01 AND "${coordinates[1]}"+0.01 "
            # sourcekey=`sendquery`
            # if [ "$sourcekey" == "" ]
            # then
            # printprocesslog "WARN sourcekey empty - coordinates"${coordinates[@]}
            # fi

            printprocesslog "INFO counting callisto logs and data files +1."
            # count the currently running RunCallisto.sh processes
            runcallistocount=`ps aux | grep RunCallisto | grep -E -o '20[12][0-9][01][0-9][0-3][0-9]_[0-9][0-9][0-9]' | sort | uniq | wc -l`
            # count data runs
            numdataruns=`echo " $numdataruns + 1 " | bc -l`
            #echo "numdata +1"

            #echo "cal: "$runcallistocount" numdat: "$numdataruns" numcallog: "${#callistofiles[@]}
            printprocesslog "INFO running callistos: "$runcallistocount" #data-runs: "$numdataruns" #callisto-logs: "${#callistofiles[@]}
            # do not overload the system in case of a lot of files to be processed
            # numruncallistos is set in setup.fact.lp.data
            if [ $runcallistocount -ge $numruncallistos ]
            then
               printprocesslog "INFO "$runcallistocount" RunCallisto.sh are running -> continue"
               #echo "INFO "$runcallistocount" RunCallisto.sh are running -> continue"
               continue
            fi
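            # start RunCallisto.sh for this sequence (in the background) if no calibration
            # log exists yet; the auxiliary files needed for the analysis are synced first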
            callistolog=`dirname $seqfile | sed -e "s/sequences/callisto/"`"/"$date"_"$runnum"-calibration.log"
            if ! [ -e $callistolog ]
            then
               rsync_aux_file $drivefilenewdaq2 $drivefile2
               rsync_aux_file $mweatherfilenewdaq $mweatherfile
               rsync_aux_file $ratesfilenewdaq $ratesfile
               rsync_aux_file $tempfilenewdaq $tempfile
               rsync_aux_file $humfilenewdaq $humfile
               #printprocesslog "INFO starting RunCallisto.sh for "$sourcekey" "$seqfile
               #echo "INFO starting RunCallisto.sh for "$sourcekey" "$seqfile
               #`dirname $0`/RunCallisto.sh $sourcekey $seqfile &
               printprocesslog "INFO starting RunCallisto.sh for "$seqfile
               #echo "INFO starting RunCallisto.sh for "$seqfile
               `dirname $0`/RunCallisto.sh $seqfile &
            fi
            continue
         else
            # skip a non-data run when it does not have 1000 events
            # as this probably indicates an FAD loss
            if [ $numevts -ne 1000 ]
            then
               printprocesslog "INFO file "$file" is a non-data file ("$runtype") and does not have 1000 events ("$numevts")"
               continue
            fi
         fi
         printprocesslog "DEBUG runrow "$runrow" (from variable) "
         runrow=`cat $infopath/runrow.txt`
         printprocesslog "DEBUG runrow "$runrow" (from file) "
         runrow=$runrow$runnum$runtype$roi"_"
         echo $runrow > $infopath/runrow.txt
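         # $runrow collects one "<runnum><runtype><roi>_" entry per run; once a complete
         # DRS calibration sequence (drs-pedestal1024, drs-gain1024, two more drs-pedestal1024,
         # drs-time1024 and two pedestal300 runs) has been collected, a drs sequence file
         # is written and the run list is reset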
         if echo $runrow | grep -E '[0-9]{3}drs-pedestal1024_[0-9]{3}drs-gain1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-time1024_[0-9]{3}pedestal300_[0-9]{3}pedestal300_' >/dev/null
         then
            runrow2=`echo $runrow | grep -E -o '[0-9]{3}drs-pedestal1024_[0-9]{3}drs-gain1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-pedestal1024_[0-9]{3}drs-time1024_[0-9]{3}pedestal300_[0-9]{3}pedestal300_'`
            run1=`echo $runrow2 | cut -d_ -f1 | sed -e 's/drs-pedestal1024//g'`
            run2=`echo $runrow2 | cut -d_ -f2 | sed -e 's/drs-gain1024//g'`
            run3=`echo $runrow2 | cut -d_ -f3 | sed -e 's/drs-pedestal1024//g'`
            run4=`echo $runrow2 | cut -d_ -f4 | sed -e 's/drs-pedestal1024//g'`
            run5=`echo $runrow2 | cut -d_ -f5 | sed -e 's/drs-time1024//g'`
            run6=`echo $runrow2 | cut -d_ -f6 | sed -e 's/pedestal300//g'`
            run7=`echo $runrow2 | cut -d_ -f7 | sed -e 's/pedestal300//g'`
            seqfile=$seqpath/${date}_${run1}.drs.seq
            printprocesslog "INFO write drs-seq "$seqfile
            echo "# written by automatic analysis in LP" > $seqfile
            echo "" >> $seqfile
            echo "Sequence: "`echo $date | cut -c3-8`$run1 >> $seqfile
            echo "Night: "`echo $datepath | sed -e 's/\//-/g'` >> $seqfile
            echo "" >> $seqfile
            echo "CalRuns: "$run2 >> $seqfile
            echo "PedRuns: "$run6" "$run7 >> $seqfile
            echo "DatRuns: "$run5 >> $seqfile
            echo "DrsRuns: "$run1" "$run3" "$run4 >> $seqfile
            echo "DrsFiles: "$run3" "$run6 >> $seqfile
            echo "" >> $seqfile
            echo "#DrsFile: "$run6 >> $seqfile
            echo "" >> $seqfile
            echo "" > $infopath/runrow.txt
            drsseq=$run1
            drsfile=$run6
         fi
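         # reset the collected run list once a pedestal300 + light-pulser-ext300 pair
         # has been added to it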
         if echo $runrow | grep -E '[0-9]{3}pedestal300_[0-9]{3}light-pulser-ext300_' >/dev/null
         then
            echo "" > $infopath/runrow.txt
         fi
      fi
   done
   printprocesslog "INFO status after loop: "$runcallistocount" callistos running, "$numdataruns" data runs to process in total, "${#callistofiles[@]}" already have a callisto-logfile "

   # get new file lists
   printprocesslog "INFO get new file lists for "$datepath
   files=( `find $rawpathnewdaq -type f -regex '.*[.]fits[.]?[g]?[f]?[z]?' | sort` )
   fileslocal=( `find $rawpath -type f -regex '.*[.]fits[.]?[g]?[f]?[z]?' | sort` )
   callistofiles=( `find $anapath/callisto -type f -name $date*-calibration.log | sort` )
   #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
   printprocesslog "INFO status after for-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns

   # wait and get new file lists
   update=
   if [ ${#fileslocal[@]} -eq ${#files[@]} ]
   then
      printprocesslog "INFO wait 60 seconds."
      sleep 60
      #echo "sleep 60..."
      printprocesslog "INFO get new file lists for "$datepath
      files=( `find $rawpathnewdaq -type f -regex '.*[.]fits[.]?[g]?[f]?[z]?' | sort` )
      fileslocal=( `find $rawpath -type f -regex '.*[.]fits[.]?[g]?[f]?[z]?' | sort` )
      callistofiles=( `find $anapath/callisto -type f -name $date*-calibration.log | sort` )
   fi
   #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns
   printprocesslog "INFO status after wait at the end of the while-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #data-runs:"$numdataruns

   # this should not happen, but may in rare cases if data are deleted in the wrong order
   if [ ${#files[@]} -lt ${#fileslocal[@]} ]
   then
      printprocesslog "ERROR numfiles on newdaq ("${#files[@]}") < #files on daq ("${#fileslocal[@]}") - maybe deleting data in the wrong order?"
      echo "ERROR numfiles on newdaq ("${#files[@]}") < #files on daq ("${#fileslocal[@]}") - maybe deleting data in the wrong order?"
      finish
   fi
done