Index: trunk/DataCheck/QuickLook/Step1.sh
===================================================================
--- trunk/DataCheck/QuickLook/Step1.sh	(revision 18859)
+++ trunk/DataCheck/QuickLook/Step1.sh	(revision 18869)
@@ -1,4 +1,11 @@
 #!/bin/bash
 #
+
+# To use the script with /data1 instead of /scratch
+#   (e.g. when /scratch is full), run
+# export data1=yes
+# before executing the script.
+# Preferably, ZipRawData.sh has already processed all files
+#   available on /scratch by this point.
 
 # remarks: 
@@ -36,5 +43,19 @@
 makedir $calpath >/dev/null
 rawpathnewdaq=/newdaq/raw/$datepath
-rawpath=/loc_data/raw/$datepath
+if [ "$data1" = "yes" ]
+then 
+   rawpath=/data1/raw/$datepath
+   rsynctempdir=/data1/rsync_tmp
+   printprocesslog "INFO using "$rawpath" for processing"
+else 
+   rawpath=/scratch/raw/$datepath
+   rsynctempdir=/scratch/rsync_tmp
+fi
+if ! [ -d $rsynctempdir ]
+then
+   mkdir $rsynctempdir
+fi
+
+
 
 # needed auxiliary files: 
@@ -66,5 +87,19 @@
 function check_disks()
 {
-   # put here the check for /scratch (output of rsync) and /data1 (output of analysis)
+   # at least 5% free disk on /data1
+   diskusage=( `df -P /data1 | grep data1 ` )
+   if [ ${diskusage[3]} -lt $disklimitnewdata2 ] 
+   then
+      echo "WARN less than 5% left on /data1 on node "$HOSTNAME
+      printprocesslog "WARN less than 5% left on /data1 on node "$HOSTNAME
+      df -h /data1
+      finish
+   fi
+   
+   # if /data1 is used for processing, /scratch doesn't need to be checked
+   if [ "$data1" = "yes" ]
+   then
+      return
+   fi
    
    # at least 10% free disk on /scratch
@@ -77,13 +112,4 @@
       finish
    fi
-   # at least 5% free disk on /data1
-   diskusage=( `df -P /data1 | grep data1 ` )
-   if [ ${diskusage[3]} -lt $disklimitnewdata2 ] 
-   then
-      echo "WARN less than 5% left on /data1 on node "$HOSTNAME
-      printprocesslog "WARN less than 5% left on /data1 on node "$HOSTNAME
-      df -h /scratch
-      finish
-   fi
 }
 
@@ -92,5 +118,6 @@
 # getting lists of files
 printprocesslog "INFO get lists of raw files on newdaq and daq"
-files=( `ssh newdaq "find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort "` )
+#files=( `ssh newdaq "find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort "` )
+files=( `find $rawpathnewdaq -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort ` )
 if [ ${#files[@]} -eq 0 ]
 then
@@ -125,5 +152,4 @@
    #  when there is more than 10% space on daq
    source `dirname $0`/../Sourcefile.sh
-   check_disks
    
    numcalibrated=0
@@ -137,8 +163,18 @@
    for file in ${files[@]}
    do
+      # check if still enough diskspace for transfer
+      check_disks
+      if [ "$certaindate" != "" ]
+      then
+         echo "processing "$file
+      fi
       printprocesslog "processing "$file
-      echo "processing "$file
-      #localfile=`echo $file | sed -e 's/newdaq/scratch/'`
-      localfile=$file
+      if [ "$data1" = "yes" ]
+      then 
+         localfile=`echo $file | sed -e 's/newdaq/data1/'`
+      else
+         localfile=`echo $file | sed -e 's/newdaq/scratch/'`
+      fi
+      #localfile=$file
 
       source `dirname $0`/../Sourcefile.sh
@@ -176,5 +212,6 @@
 
          # rsync 
-         if ! rsync -au -T $rsynctempdir --bwlimit=$bwlimit newdaq:$file $localfile
+         #if ! rsync -au -T $rsynctempdir --bwlimit=$bwlimit newdaq:$file $localfile
+         if ! rsync -au -T $rsynctempdir --bwlimit=$bwlimit $file $localfile
          then
             printprocesslog "ERROR something went wrong with rsync of "$file
@@ -231,5 +268,5 @@
 
       # get run number
-      runnum=`echo $localfile | cut -d_ -f3 | cut -d. -f1`
+      runnum=`basename $localfile | cut -d_ -f2 | cut -d. -f1`
       
       # what is needed to process the different runs? 
@@ -351,5 +388,6 @@
    # get new file lists
    printprocesslog "INFO get new file lists for "$datepath
-   files=( `ssh newdaq "find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort "` )
+   #files=( `ssh newdaq "find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort "` )
+   files=( `find $rawpathnewdaq -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort ` )
    fileslocal=( `find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort` )
    callistofiles=( `find $calpath -type f -name $date*-calibration.log 2>/dev/null | sort` )
@@ -364,5 +402,6 @@
       sleep 60
       printprocesslog "INFO get new file lists for "$datepath
-      files=( `ssh newdaq "find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort "` )
+      #files=( `ssh newdaq "find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort "` )
+      files=( `find $rawpathnewdaq -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort ` )
       fileslocal=( `find $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort` )
       callistofiles=( `find $calpath -type f -name $date*-calibration.log 2>/dev/null | sort` )
