Timestamp: 11/21/11 06:56:22 (13 years ago)
Location: trunk/DataCheck
Files: 3 edited
Legend:
- Unmodified: no prefix
- Added: lines prefixed with +
- Removed: lines prefixed with -
trunk/DataCheck/RsyncAux.sh
r12576 -> r12582

 #!/bin/bash
-
-date=`date +%Y/%m/%d --date="-1day"`
 
 today=`date +%F`
 logfile=/home/`whoami`/DataCheck/log/RsyncAux$today.log
 
-echo "" >> $logfile 2>&1
-echo "" >> $logfile 2>&1
-echo "" >> $logfile 2>&1
-# auxiliary data directories on data
-auxdirdaq=/daq/aux/$date/ # /loc_data from daq via nfs on data
-auxdirdata=/loc_data/aux/$date
-echo `date`": processing files in "$auxdir >> $logfile 2>&1
-
-# get current hour
-hour=`date +%k`
-# define bwlimit for rsync depending on the time
-if [ $hour -lt 8 ] || [ $hour -gt 18 ]
-then
-    # limit bw for rsync to 20 MB/s during night
-    bwlimit="--bwlimit=20000"
-    echo "rsync data with "$bwlimit >> $logfile 2>&1
-else
-    # no bw limit during day
-    echo "rsync data without bwlimit" >> $logfile 2>&1
-fi
-
-#rsync from daq to data
-if ! /usr/bin/rsync -avxHP $bwlimit $auxdirdaq $auxdirdata >> $logfile 2>&1
-then
-    echo `date`": problem rsyncing auxiliary data from daq to data"
-fi
+# get last 3 nights
+dates=( `date +%Y/%m/%d` `date +%Y/%m/%d --date="-1day"` `date +%Y/%m/%d --date="-2day"` `date +%Y/%m/%d --date="-3day"` )
+
+# do rsync for rawfiles of these dates
+for date in ${dates[@]}
+do
+    echo "" >> $logfile 2>&1
+    echo "" >> $logfile 2>&1
+    echo "" >> $logfile 2>&1
+    # auxiliary data directories on data
+    auxdirdaq=/daq/aux/$date/ # /loc_data from daq via nfs on data
+    if ! [ -d $auxdirdaq ]
+    then
+        echo `date`": "$auxdirdaq" not found. Data were probably taken on data" >> $logfile 2>&1
+        continue
+    fi
+    auxdirdata=/loc_data/aux/$date
+    echo `date`": processing files in "$auxdirdaq >> $logfile 2>&1
+
+    # get current hour
+    hour=`date +%k`
+    # define bwlimit for rsync depending on the time
+    if [ $hour -lt 8 ] || [ $hour -gt 18 ]
+    then
+        # limit bw for rsync to 20 MB/s during night
+        bwlimit="--bwlimit=20000"
+        echo "rsync data with "$bwlimit >> $logfile 2>&1
+    else
+        # no bw limit during day
+        echo "rsync data without bwlimit" >> $logfile 2>&1
+    fi
+
+    #rsync from daq to data
+    if ! /usr/bin/rsync -avxHP $bwlimit $auxdirdaq $auxdirdata >> $logfile 2>&1
+    then
+        echo `date`": problem rsyncing auxiliary data from daq to data"
+    fi
+done
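The change above wraps the whole transfer in a loop over the current night and the three previous nights, and skips any night whose source directory does not exist. A minimal standalone sketch of that pattern (not part of the changeset; the echo stands in for the real rsync call):

#!/bin/bash

# night list as built in the changeset: today plus the three previous days
dates=( `date +%Y/%m/%d` `date +%Y/%m/%d --date="-1day"` `date +%Y/%m/%d --date="-2day"` `date +%Y/%m/%d --date="-3day"` )

for date in ${dates[@]}
do
    # hypothetical source directory, analogous to auxdirdaq / rawdirdaq
    srcdir=/daq/aux/$date/
    if ! [ -d $srcdir ]
    then
        echo "$srcdir not found => skipping this night"
        continue
    fi
    echo "would process $srcdir"
done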
trunk/DataCheck/RsyncRaw.sh
r12576 -> r12582

 #!/bin/bash
-
-date=`date +%Y/%m/%d --date="-1day"`
 
 today=`date +%F`
 logfile=/home/`whoami`/DataCheck/log/RsyncRaw$today.log
 
-echo "" >> $logfile 2>&1
-echo "" >> $logfile 2>&1
-echo "" >> $logfile 2>&1
-# raw data directories on data
-rawdirdaq=/daq/raw/$date/ # /loc_data from daq via nfs on data
-rawdirdata=/loc_data/raw/$date
-echo `date`": processing files in "$rawdir >> $logfile 2>&1
-
-# get current hour
-hour=`date +%k`
-# define bwlimit for rsync depending on the time
-if [ $hour -lt 8 ] || [ $hour -gt 18 ]
-then
-    # limit bw for rsync to 20 MB/s during night
-    bwlimit="--bwlimit=20000"
-    echo "rsync data with "$bwlimit >> $logfile 2>&1
-else
-    # no bw limit during day
-    echo "rsync data without bwlimit" >> $logfile 2>&1
-fi
-
-#rsync from daq to data
-if ! /usr/bin/rsync -avxHP $bwlimit $rawdirdaq $rawdirdata >> $logfile 2>&1
-then
-    echo `date`": problem rsyncing rawdata from daq to data"
-fi
+# get last 3 nights
+dates=( `date +%Y/%m/%d` `date +%Y/%m/%d --date="-1day"` `date +%Y/%m/%d --date="-2day"` `date +%Y/%m/%d --date="-3day"` )
+
+# do rsync for rawfiles of these dates
+for date in ${dates[@]}
+do
+    echo "" >> $logfile 2>&1
+    echo "" >> $logfile 2>&1
+    echo "" >> $logfile 2>&1
+    # raw data directories on data
+    rawdirdaq=/daq/raw/$date/ # /loc_data from daq via nfs on data
+    if ! [ -d $rawdirdaq ]
+    then
+        echo `date`": "$rawdirdaq" not found. Data were probably taken on data" >> $logfile 2>&1
+        continue
+    fi
+    rawdirdata=/loc_data/raw/$date
+    echo `date`": processing files in "$rawdirdaq >> $logfile 2>&1
+
+    # get current hour
+    hour=`date +%k`
+    # define bwlimit for rsync depending on the time
+    if [ $hour -lt 8 ] || [ $hour -gt 18 ]
+    then
+        # limit bw for rsync to 20 MB/s during night
+        bwlimit="--bwlimit=20000"
+        echo "rsync data with "$bwlimit >> $logfile 2>&1
+    else
+        # no bw limit during day
+        echo "rsync data without bwlimit" >> $logfile 2>&1
+    fi
+
+    #rsync from daq to data
+    if ! /usr/bin/rsync -avxHP $bwlimit $rawdirdaq $rawdirdata >> $logfile 2>&1
+    then
+        echo `date`": problem rsyncing rawdata from daq to data"
+    fi
+done
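RsyncRaw.sh gets the same night loop as RsyncAux.sh; both keep the existing day/night bandwidth rule for rsync. As a reminder of how that rule behaves, here is a minimal sketch that only prints the command it would run (paths are hypothetical placeholders; rsync's --bwlimit takes kB/s, so 20000 matches the "20 MB/s" in the comment):

#!/bin/bash

# same day/night rule as in the scripts above
hour=`date +%k`
if [ $hour -lt 8 ] || [ $hour -gt 18 ]
then
    # night: limit rsync to roughly 20 MB/s (20000 kB/s)
    bwlimit="--bwlimit=20000"
else
    # day: no bandwidth limit
    bwlimit=""
fi
# placeholder paths; the real scripts use /daq/... and /loc_data/...
echo "would run: rsync -avxHP $bwlimit /daq/raw/2011/11/20/ /loc_data/raw/2011/11/20"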
trunk/DataCheck/ZipRawData.sh
r12575 -> r12582

 #!/bin/bash
-
-year=`date +%Y --date="-1day"`
-month=`date +%m --date="-1day"`
 
 today=`date +%F`
 logfile=/home/`whoami`/DataCheck/log/ZipRaw$today.log
 
-echo "" >> $logfile 2>&1
-echo "" >> $logfile 2>&1
-echo "" >> $logfile 2>&1
-rawdir=/loc_data/raw/$year/$month
-echo `date`": processing files in "$rawdir >> $logfile
-
-#find all directories in $rawdir
-dirs=`find $rawdir -type d | sort`
-
-echo `date`": create missing directories in /loc_data/zipraw/$year/$month" >> $logfile
-for dir in $dirs
-do
-    zipdir=`echo $dir | sed -e 's/raw/zipraw/'`
-    # check if output directory for zip already exists
-    if [ -d $zipdir ]
-    then
-        continue
-    fi
-    # create output directory for zip
-    mkdir -pv $zipdir >> $logfile
-done
-
-# find all fits-files starting with the oldest file
-echo `date`": finding files to be zipped in $rawdir..." >> $logfile
-fitsfiles=`find $rawdir -type f -name '*fits'| sort `
-
-# loop to zip files
-echo `date`": zipping files in $rawdir..." >> $logfile
-for file in $fitsfiles
-do
-    # filename for temporary and final zipfile
-    zipfile=`echo $file | sed -e 's/raw/zipraw/' -e 's/fits/fits.gz/'`
-    zipfiletmp=`echo $file | sed -e 's/raw/zipraw/' -e 's/fits/fits.tmp.gz/'`
-    # check if zipped file already exists
-    if [ -e $zipfile ]
-    then
-        continue
-    fi
-
-    # check if files was accessed in the last 30 minutes
-    isnew=`find $file -amin -30`
-    if [ "$isnew" != "" ]
-    then
-        echo $file" is not older than 30 min => continue" >> $logfile
-        continue
-    fi
-
-    # check if file is already finished
-    # original file on daq (if data was taken on daq
-    origfile=`echo $file | sed -e 's/loc_data/daq/'`
-    if [ -e $origfile ]
-    then
-        # get time of last modification as seconds since Epoch for both files
-        timeorig=`stat -c %Y $origfile`
-        timecopy=`stat -c %Y $file`
-        # compare times
-        if ! [ $timeorig -eq $timecopy ]
-        then
-            # if times are not the same, the file is still open => no zip
-            echo `date`": file "$file" not yet closed => continue" >> $logfile
-            continue
-        fi
-    else
-        # if the origfile doesn't exist, the data was probably written not on daq but on data
-        echo `date`": file "$file" was probably taken on data and not daq " >> $logfile
-    fi
-
-    echo `date`": zipping "$file" to "$zipfile" ..." >> $logfile
-    # zip file to stdout and pipe it to outputfile
-    if pigz -1 -c -f $file > $zipfiletmp;
-    then
-        # if successful, move temporary to final zipfile
-        mv -v $zipfiletmp $zipfile >> $logfile
-    else
-        # if not successful, remove temporary zipfile
-        rm -v $zipfiletmp >> $logfile
-    fi
-done
+# get last 3 nights
+dates=( `date +%Y/%m/%d` `date +%Y/%m/%d --date="-1day"` `date +%Y/%m/%d --date="-2day"` `date +%Y/%m/%d --date="-3day"` )
+
+# do rsync for rawfiles of these dates
+for date in ${dates[@]}
+do
+    echo "" >> $logfile 2>&1
+    echo "" >> $logfile 2>&1
+    echo "" >> $logfile 2>&1
+    rawdir=/loc_data/raw/$date
+    echo `date`": processing files in "$rawdir >> $logfile
+    # check if data are available from that night
+    if ! [ -d $rawdir ]
+    then
+        echo `date`": no data available in "$rawdir >> $logfile
+        continue
+    fi
+    zipdir=/loc_data/zipraw/$date
+    if ! [ -d $zipdir ]
+    then
+        # create output directory for zip
+        mkdir -pv $zipdir >> $logfile
+    fi
+
+    # find all fits-files starting with the oldest file
+    echo `date`": finding files to be zipped in $rawdir..." >> $logfile
+    fitsfiles=`find $rawdir -type f -name '*fits'| sort `
+
+    # loop to zip files
+    echo `date`": zipping files in $rawdir..." >> $logfile
+    for file in $fitsfiles
+    do
+        # filename for temporary and final zipfile
+        zipfile=`echo $file | sed -e 's/raw/zipraw/' -e 's/fits/fits.gz/'`
+        zipfiletmp=`echo $file | sed -e 's/raw/zipraw/' -e 's/fits/fits.tmp.gz/'`
+        # check if zipped file already exists
+        if [ -e $zipfile ]
+        then
+            continue
+        fi
+
+        # check if raw file was accessed in the last 30 minutes
+        isnew=`find $file -amin -30`
+        if [ "$isnew" != "" ]
+        then
+            echo $file" is not older than 30 min => continue" >> $logfile
+            continue
+        fi
+
+        # check if file is already finished
+        # original file on daq (if data was taken on daq
+        origfile=`echo $file | sed -e 's/loc_data/daq/'`
+        if [ -e $origfile ]
+        then
+            # get time of last modification as seconds since Epoch for both files
+            timeorig=`stat -c %Y $origfile`
+            timecopy=`stat -c %Y $file`
+            # compare times
+            if ! [ $timeorig -eq $timecopy ]
+            then
+                # if times are not the same, the file is still open => no zip
+                echo `date`": file "$file" not yet closed => continue" >> $logfile
+                continue
+            fi
+        else
+            # if the origfile doesn't exist, the data was probably written not on daq but on data
+            echo `date`": file "$file" was probably taken on data and not daq " >> $logfile
+        fi
+
+        echo `date`": zipping "$file" to "$zipfile" ..." >> $logfile
+        # zip file to stdout and pipe it to outputfile
+        if pigz -1 -c -f $file > $zipfiletmp;
+        then
+            # if successful, move temporary to final zipfile
+            mv -v $zipfiletmp $zipfile >> $logfile
+        else
+            # if not successful, remove temporary zipfile
+            rm -v $zipfiletmp >> $logfile
+        fi
+    done
+done
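The inner zip loop is unchanged in substance: each raw file is compressed with pigz into a temporary file, which is renamed to its final name only if compression succeeds. A minimal standalone sketch of that pattern (not part of the changeset; the input path is a hypothetical example):

#!/bin/bash

# hypothetical input file; the real script loops over `find $rawdir -name '*fits'`
file=/loc_data/raw/2011/11/20/20111120_042.fits
# derive final and temporary output names as in ZipRawData.sh
zipfile=`echo $file | sed -e 's/raw/zipraw/' -e 's/fits$/fits.gz/'`
zipfiletmp=`echo $file | sed -e 's/raw/zipraw/' -e 's/fits$/fits.tmp.gz/'`
mkdir -p `dirname $zipfile`

# compress to the temporary name first: an interrupted run then never leaves
# a truncated .fits.gz behind that later runs would skip as already zipped
if pigz -1 -c -f $file > $zipfiletmp
then
    mv -v $zipfiletmp $zipfile
else
    rm -v $zipfiletmp
fi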