Index: branches/trigger_burst_research/Archive/ENDfixer.cpp
===================================================================
--- branches/trigger_burst_research/Archive/ENDfixer.cpp	(revision 18288)
+++ branches/trigger_burst_research/Archive/ENDfixer.cpp	(revision 18288)
@@ -0,0 +1,133 @@
+/*
+ * fitsHacker.cc
+ *
+ *  Created on: Sep 8, 2011
+ *      Author: lyard
+ */
+
+#include <fstream>
+#include <cstdlib>
+#include <iostream>
+#include <cstring>
+
+using namespace std;
+/*
+ * Usage: fitsHacker <nameOfFileToHack> <numberOfBytesToSkip> <WhichCharactersToPutAfterShift>(optional)
+ *
+ *
+ *
+ */
+
+enum ModeT {seekingHDU,
+            foundHDU,
+            fixedEND,
+            reachedHeaderEnd};
+
+int main(int argc, char** argv)
+{
+    if (argc < 2)
+        return 0;
+
+/* ENDfixer */
+    fstream file(argv[1]);
+
+    char c[81];
+    c[80] = 0;
+    int seeking=0;
+
+    ModeT mode = seekingHDU;
+
+    bool reallyFixedEnd = false;
+    int endAddress = 0;
+
+    while (mode != fixedEND)
+    {
+        file.read(c, 80);
+        if (!file.good()) {
+            cout << 0;
+            return 0;
+        }
+        string str(c);
+//        cout << c << endl;
+        if (str.substr(0, 9) == "XTENSION=")
+            mode = foundHDU;
+
+        if (mode == foundHDU && str=="END                                                                             ")
+        {
+            mode = fixedEND;
+            endAddress = seeking;
+//            cout << "found END at " << endAddress << endl;
+        }
+        if (mode == foundHDU && str =="                                                                                ")
+        {
+            file.seekp(seeking);
+            file.put('E');
+            file.put('N');
+            file.put('D');
+            mode = fixedEND;
+            reallyFixedEnd = true;
+            endAddress = seeking;
+//            cout << "added END at " << endAddress << endl;
+        }
+
+        seeking+=80;
+    }
+
+    file.seekp(seeking-1);
+    while (mode != reachedHeaderEnd)
+    {
+        file.read(c, 80);
+        if (!file.good()) {
+            cout << 0;
+            return 0;
+        }
+        string str(c);
+
+        if (str =="                                                                                ")
+            seeking+=80;
+        else
+            mode = reachedHeaderEnd;
+    }
+
+    file.close();
+
+    if (seeking % 2880 != 0)
+    {
+        cout << "Error: header length not acceptable" << endl;
+        return 0;
+    }
+
+    if (((seeking - endAddress)/80) > 36)
+    {
+        cout << "Error: too much header space after END keyword" << endl;
+        return 0;
+    }
+
+    cout << seeking;
+
+    return seeking;
+
+/* FITS HACKER
+    file.get(data, shift);
+
+    for (int i=0;i<shift;i++)
+    {
+        if (i%80 == 0)
+            cout << "||| " << endl;
+        cout << data[i];
+    }
+    cout << endl;
+    if (argc < 4)
+        return 0;
+
+    int length = strlen(argv[3]);
+
+
+    file.seekp(shift-1);
+    for (int i=0;i<length;i++)
+        file.put(argv[3][i]);
+
+    file.close();
+
+    delete[] data;*/
+}
Index: branches/trigger_burst_research/Archive/ENDfixer.cxx
===================================================================
--- branches/trigger_burst_research/Archive/ENDfixer.cxx	(revision 18288)
+++ branches/trigger_burst_research/Archive/ENDfixer.cxx	(revision 18288)
@@ -0,0 +1,133 @@
+/*
+ * fitsHacker.cc
+ *
+ *  Created on: Sep 8, 2011
+ *      Author: lyard
+ */
+
+#include <fstream>
+#include <cstdlib>
+#include <iostream>
+#include <cstring>
+
+using namespace std;
+/*
+ * Usage: fitsHacker <nameOfFileToHack> <numberOfBytesToSkip> <WhichCharactersToPutAfterShift>(optional)
+ *
+ *
+ *
+ */
+
+enum ModeT {seekingHDU,
+            foundHDU,
+            fixedEND,
+            reachedHeaderEnd};
+
+int main(int argc, char** argv)
+{
+    if (argc < 2)
+        return 0;
+
+/* ENDfixer */
+    fstream file(argv[1]);
+
+    char c[81];
+    c[80] = 0;
+    int seeking=0;
+
+    ModeT mode = seekingHDU;
+
+    bool reallyFixedEnd = false;
+    int endAddress = 0;
+
+    while (mode != fixedEND)
+    {
+        file.read(c, 80);
+        if (!file.good()) {
+            cout << 0;
+            return 0;
+        }
+        string str(c);
+//        cout << c << endl;
+        if (str.substr(0, 9) == "XTENSION=")
+            mode = foundHDU;
+
+        if (mode == foundHDU && str=="END                                                                             ")
+        {
+            mode = fixedEND;
+            endAddress = seeking;
+//            cout << "found END at " << endAddress << endl;
+        }
+        if (mode == foundHDU && str =="                                                                                ")
+        {
+            file.seekp(seeking);
+            file.put('E');
+            file.put('N');
+            file.put('D');
+            mode = fixedEND;
+            reallyFixedEnd = true;
+            endAddress = seeking;
+//            cout << "added END at " << endAddress << endl;
+        }
+
+        seeking+=80;
+    }
+
+    file.seekp(seeking-1);
+    while (mode != reachedHeaderEnd)
+    {
+        file.read(c, 80);
+        if (!file.good()) {
+            cout << 0;
+            return 0;
+        }
+        string str(c);
+
+        if (str =="                                                                                ")
+            seeking+=80;
+        else
+            mode = reachedHeaderEnd;
+    }
+
+    file.close();
+
+    if (seeking % 2880 != 0)
+    {
+        cout << "Error: header length not acceptable" << endl;
+        return 0;
+    }
+
+    if (((seeking - endAddress)/80) > 36)
+    {
+        cout << "Error: too much header space after END keyword" << endl;
+        return 0;
+    }
+
+    cout << seeking;
+
+    return seeking;
+
+/* FITS HACKER
+    file.get(data, shift);
+
+    for (int i=0;i<shift;i++)
+    {
+        if (i%80 == 0)
+            cout << "||| " << endl;
+        cout << data[i];
+    }
+    cout << endl;
+    if (argc < 4)
+        return 0;
+
+    int length = strlen(argv[3]);
+
+
+    file.seekp(shift-1);
+    for (int i=0;i<length;i++)
+        file.put(argv[3][i]);
+
+    file.close();
+
+    delete[] data;*/
+}
Index: branches/trigger_burst_research/Archive/MjDtoISO.cpp
===================================================================
--- branches/trigger_burst_research/Archive/MjDtoISO.cpp	(revision 18288)
+++ branches/trigger_burst_research/Archive/MjDtoISO.cpp	(revision 18288)
@@ -0,0 +1,28 @@
+/*
+ * MjDtoISO.cc
+ *
+ *  Created on: Dec 16, 2011
+ *      Author: lyard
+ */
+
+#include "Time.h"
+#include <iostream>
+
+using namespace std;
+
+
+int main(int argc, const char** argv)
+{
+    if (argc != 2)
+    {
+        cout << "Error: only one argument is accepted" << endl;
+        return -1;
+    }
+
+    double MjD = atof(argv[1]);
+
+    Time t(MjD);
+
+    cout << t.Iso() << endl;
+
+}
Index: branches/trigger_burst_research/Archive/README
===================================================================
--- branches/trigger_burst_research/Archive/README	(revision 18288)
+++ branches/trigger_burst_research/Archive/README	(revision 18288)
@@ -0,0 +1,45 @@
+This is the FACT construction data ingest and verification scripts README
+
+In order to ingest data, use either auxIngest.sh or rawIngest.sh appropriately.
+Raw Ingest:
+	- if file is corrupted, a tentative repair is done
+		- add missing END tag
+		- reduce file size so that it becomes FITS-compliant again
+	- Keywords are checked and added or updated if required. 
+	- checksum is added to the file
+	- if file has been repaired successfully, then it is moved to the archive
+
+Aux Ingest:
+	- if file is corrupted, a tentative repair is done
+		- add missing END tag
+		- reduce file size so that it becomes FITS-compliant again
+			- if more rows seem to be present, restore the extra rows as long as the time value seems correct (i.e. around the expected value)
+		- if mjdref was not subtracted from the Time column, the Time column is updated accordingly
+	- Keywords are checked and added or updated if required.
+	- checksum is added to the file
+	- if file has been repaired successfully, then it is moved to the archive	
+	
+	
+Checked keywords:
+	- TSTARTI
+	- TSTARTF
+	- TSTOPI
+	- TSTOPF
+	- DATE-OBS
+	- DATE-END
+	- TELESCOP
+	- PACKAGE
+	- ORIGIN
+	- TIMEUNIT
+	- MJDREF
+	- TIMESYS
+
+Each ingest script takes source and destination folders as arguments, the last argument being the string to append to log filenames.
+
+You will need the programs whose source is included in this folder: please compile them appropriately
+
+The ingested data headers can be checked with checkForHeaderUpdatesAux/Raw.sh
+
+while the actual data can be compared with checkRawFioleIntegrityAndContent.sh
+
+This script will not work on aux data because the time column may have been updated, hence the files differ.
Index: branches/trigger_burst_research/Archive/RowChecker.cpp
===================================================================
--- branches/trigger_burst_research/Archive/RowChecker.cpp	(revision 18288)
+++ branches/trigger_burst_research/Archive/RowChecker.cpp	(revision 18288)
@@ -0,0 +1,114 @@
+/*
+ * RowChecker.cc
+ *
+ *  Created on: Dec 20, 2011
+ *      Author: lyard
+ */
+
+#include <fstream>
+#include <cstdlib>
+#include <iostream>
+#include <cstring>
+#include <sstream>
+
+using namespace std;
+
+
+//usage RowChecker <name of file> <size of header> <size of line> <mjdref> <givenLines>
+int main(int argc, char** argv)
+{
+
+    if (argc < 6)
+        return 0;
+
+    fstream file(argv[1]);
+
+    int headLen = atoi(argv[2]);
+    int lineWidth = atoi(argv[3]);
+    double mjdRef = atof(argv[4]);
+    int numLines = atoi(argv[5]);
+
+    int totalBytes = headLen;
+    file.seekp(headLen);
+
+    char* buf = new char[lineWidth];
+
+    double currentTime = 0;
+    char timeBuf[16];
+    int realNumRows = 0;
+
+    while (file.read(buf, lineWidth))
+    {
+        timeBuf[0] = buf[7];
+        timeBuf[1] = buf[6];
+        timeBuf[2] = buf[5];
+        timeBuf[3] = buf[4];
+        timeBuf[4] = buf[3];
+        timeBuf[5] = buf[2];
+        timeBuf[6] = buf[1];
+        timeBuf[7] = buf[0];
+        currentTime = reinterpret_cast<double*>(timeBuf)[0];
+
+        if (realNumRows >= numLines)
+        {
+            if (currentTime + mjdRef > 60000 || currentTime + mjdRef < 10000)
+                break;
+            if (currentTime + mjdRef > 20000 && currentTime + mjdRef < 50000)
+                break;
+        }
+//fix the time column if required.
+        if (currentTime > 50000 && currentTime < 60000)
+        {
+            currentTime -= 40587;
+            reinterpret_cast<double*>(timeBuf)[0] = currentTime;
+            file.seekp(totalBytes);
+            file.put(timeBuf[7]);
+            file.put(timeBuf[6]);
+            file.put(timeBuf[5]);
+            file.put(timeBuf[4]);
+            file.put(timeBuf[3]);
+            file.put(timeBuf[2]);
+            file.put(timeBuf[1]);
+            file.put(timeBuf[0]);
+            file.seekp(totalBytes + lineWidth);
+        }
+
+        realNumRows++;
+        totalBytes += lineWidth;
+    }
+    //now update the number of lines of the file
+    file.close();
+    file.open(argv[1]);
+    file.seekp(2880);
+    delete[] buf;
+    buf = new char[81];
+    buf[80] = 0;
+    bool changeDone = false;
+    int seeked = 2880;
+    if (realNumRows == numLines)
+        changeDone = true;
+
+    while (file.good() && !changeDone)
+    {
+        file.read(buf, 80);
+        string str(buf);
+
+        if (str.substr(0,9) == "NAXIS2  =")
+        {
+            ostringstream ss;
+            ss << realNumRows;
+            file.seekp(seeked + 30 - ss.str().size());
+            for (int i=0;i<ss.str().size();i++)
+                file.put(ss.str()[i]);
+            changeDone = true;
+            break;
+        }
+        seeked += 80;
+    }
+    if (!changeDone)
+        cout << -1;
+    else
+        cout << realNumRows;
+    file.close();
+    return realNumRows;
+}
Index: branches/trigger_burst_research/Archive/RowChecker.cxx
===================================================================
--- branches/trigger_burst_research/Archive/RowChecker.cxx	(revision 18288)
+++ branches/trigger_burst_research/Archive/RowChecker.cxx	(revision 18288)
@@ -0,0 +1,114 @@
+/*
+ * RowChecker.cc
+ *
+ *  Created on: Dec 20, 2011
+ *      Author: lyard
+ */
+
+#include <fstream>
+#include <cstdlib>
+#include <iostream>
+#include <cstring>
+#include <sstream>
+
+using namespace std;
+
+
+//usage RowChecker <name of file> <size of header> <size of line> <mjdref> <givenLines>
+int main(int argc, char** argv)
+{
+
+    if (argc < 6)
+        return 0;
+
+    fstream file(argv[1]);
+
+    int headLen = atoi(argv[2]);
+    int lineWidth = atoi(argv[3]);
+    double mjdRef = atof(argv[4]);
+    int numLines = atoi(argv[5]);
+
+    int totalBytes = headLen;
+    file.seekp(headLen);
+
+    char* buf = new char[lineWidth];
+
+    double currentTime = 0;
+    char timeBuf[16];
+    int realNumRows = 0;
+
+    while (file.read(buf, lineWidth))
+    {
+        timeBuf[0] = buf[7];
+        timeBuf[1] = buf[6];
+        timeBuf[2] = buf[5];
+        timeBuf[3] = buf[4];
+        timeBuf[4] = buf[3];
+        timeBuf[5] = buf[2];
+        timeBuf[6] = buf[1];
+        timeBuf[7] = buf[0];
+        currentTime = reinterpret_cast<double*>(timeBuf)[0];
+
+        if (realNumRows >= numLines)
+        {
+            if (currentTime + mjdRef > 60000 || currentTime + mjdRef < 10000)
+                break;
+            if (currentTime + mjdRef > 20000 && currentTime + mjdRef < 50000)
+                break;
+        }
+//fix the time column if required.
+        if (currentTime > 50000 && currentTime < 60000)
+        {
+            currentTime -= 40587;
+            reinterpret_cast<double*>(timeBuf)[0] = currentTime;
+            file.seekp(totalBytes);
+            file.put(timeBuf[7]);
+            file.put(timeBuf[6]);
+            file.put(timeBuf[5]);
+            file.put(timeBuf[4]);
+            file.put(timeBuf[3]);
+            file.put(timeBuf[2]);
+            file.put(timeBuf[1]);
+            file.put(timeBuf[0]);
+            file.seekp(totalBytes + lineWidth);
+        }
+
+        realNumRows++;
+        totalBytes += lineWidth;
+    }
+    //now update the number of lines of the file
+    file.close();
+    file.open(argv[1]);
+    file.seekp(2880);
+    delete[] buf;
+    buf = new char[81];
+    buf[80] = 0;
+    bool changeDone = false;
+    int seeked = 2880;
+    if (realNumRows == numLines)
+        changeDone = true;
+
+    while (file.good() && !changeDone)
+    {
+        file.read(buf, 80);
+        string str(buf);
+
+        if (str.substr(0,9) == "NAXIS2  =")
+        {
+            ostringstream ss;
+            ss << realNumRows;
+            file.seekp(seeked + 30 - ss.str().size());
+            for (int i=0;i<ss.str().size();i++)
+                file.put(ss.str()[i]);
+            changeDone = true;
+            break;
+        }
+        seeked += 80;
+    }
+    if (!changeDone)
+        cout << -1;
+    else
+        cout << realNumRows;
+    file.close();
+    return realNumRows;
+}
Index: branches/trigger_burst_research/Archive/associateRunsAndCalibs.sh
===================================================================
--- branches/trigger_burst_research/Archive/associateRunsAndCalibs.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/associateRunsAndCalibs.sh	(revision 18288)
@@ -0,0 +1,995 @@
+#!/bin/bash
+
+#####################################
+#
+#	CONFIGURATION VARIABLES
+#
+#####################################
+#temporary text files
+colDescFile="coldesc.lis"
+dataFile="data.lis"
+keywordsFile="keywords.lis"
+logfile="logfile.txt"
+#date for which the script should be run
+year=$1
+month=$2
+day=$3
+#target folders
+#sourceFolder="/data00/fact-construction"
+sourceFolder="/archive/fact/rev_1"
+targetFolder=`pwd`
+
+#make source folder relative to target
+common_part=$targetFolder
+back=
+while [ "$common_part" != "/" ] && [ "${sourceFolder#$common_part}" == "$sourceFolder" ] 
+do
+  common_part=`dirname $common_part`
+  back="../${back}"
+#  echo $common_part $back
+done
+baseFolder=$back${sourceFolder#$common_part}
+
+#remove useless artifacts from baseFolder
+pathChanged="true"
+while [ "$pathChanged" == "true" ]
+do
+	pathChanged="false"
+	newBaseFolder=`echo $baseFolder | sed -e 's/\/\//\//g'`
+	if [ "$newBaseFolder" != "$baseFolder" ]
+	then
+		pathChanged="true"
+		echo "$baseFolder >>> $newBaseFolder"
+	fi
+	baseFolder=$newBaseFolder
+done
+
+rawFolder=$baseFolder"/raw/"$year"/"$month"/"$day
+auxFolder=$baseFolder"/aux/"$year"/"$month"/"$day
+tempFile="./tempFile.txt"
+tempFits="./tempFits.fits"
+#input aux files
+trackingTable="DRIVE_CONTROL_TRACKING_POSITION"
+trackingFile=$auxFolder"/"$year$month$day"."$trackingTable".fits"
+triggerTable="FTM_CONTROL_TRIGGER_RATES"
+triggerFile=$auxFolder"/"$year$month$day"."$triggerTable".fits"
+staticTable="FTM_CONTROL_STATIC_DATA"
+staticFile=$auxFolder"/"$year$month$day"."$staticTable".fits"
+voltagesTable="BIAS_CONTROL_VOLTAGE"
+voltagesFile=$auxFolder"/"$year$month$day"."$voltagesTable".fits"
+currentsTable="BIAS_CONTROL_CURRENT"
+currentFile=$auxFolder"/"$year$month$day"."$currentsTable".fits"
+
+basePathAux=$auxFolder"/"$year$month$day
+
+auxTable="BIAS_CONTROL_NOMINAL"
+auxDesc="Some_Description"
+auxFile=$basePathAux"."$auxTable".fits"
+
+###########################################
+#
+#	WRITE EXTRA AUX FILE
+#	Writes an extra entry to the data file
+#	Also checks for existence of said file
+#
+##########################################
+function writeExtraAuxFile() {
+
+	auxFile=$basePathAux"."$auxTable".fits"
+	if ! [ -e $auxFile ]
+	then
+    	     dummy=3
+	     #echo "WARNING: Optionnal aux file "$auxFile" not found." | tee -a $logfile 2>&1
+	else
+	     echo $auxTable" BINTABLE URL ../../../"$auxFile" 1 1 "$auxDesc >> $dataFile	
+	fi
+}
+
+currentEntry=""
+roi=-1
+roiTM=-1
+runnumber=-1
+doNotDoThisEntry="false"
+###########################################
+#
+#	WRITE HEADER KEYS TO INPUT FILE
+#	Extract and write the header keywords to the output text files defined above.
+#	These text files are then meant to feed the fits creation tool fcreate
+#	Large parts of this function code were taken from Daniela's scripts
+#
+###########################################
+function writeHeaderKeysToInputFile () {
+#echo "Writing header keys for "$currentEntry
+	#get all the missing data (everything appart from roi and roiTM)
+	runtype=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+        numevents=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep Events | grep -E -o '[0-9]+'`
+        numphys=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRG ' | grep -E -o '[0-9]+'`
+        numext1=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRGEXT1' | grep -E -o "'[0-9]+'" | grep -E -o '[0-9]+'`
+        numext2=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRGEXT2' | grep -E -o "'[0-9]+'" | grep -E -o '[0-9]+'`
+        numelp=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRGLPE' | grep -E -o '[0-9]+'`
+        numilp=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRGLPI' | grep -E -o '[0-9]+'`
+        numoth=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRGMISC' | grep -E -o '[0-9]+'`
+        numped=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRGPED' | grep -E -o '[0-9]+'`
+        numtime=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'NTRGTIM' | grep -E -o '[0-9]+'`
+	dateRaw=$year$month$day
+	
+	if [ "$runtype" == "" ]
+	then
+		runtype="N/A"
+	fi
+	if [ "$numevents" == "" ]
+	then
+		numevents="0"
+	fi
+	
+#	dateRaw=`echo $currentEntry | grep -E -o '20[1-3][0-9][01][0-9][012][0-9]'`
+#echo "dateRaw: "$dateRaw" yearmonthday: "$year$month$day
+
+         # in newest data start time is in DATE-OBS
+         # in older data start time is in TSTART
+         # in the beginning TSTART was empty
+        runstart=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep DATE-OBS | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+        runstart2=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep TSTART | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+        if [ "$runstart" == ""  ]
+        then
+           if [ "$runstart2" == ""  ]
+           then
+              runstart=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep DATE | grep -v 'DATE-' | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+           else
+              runstart=$runstart2
+           fi
+        fi
+        # in newest data start time is in DATE-END
+        # in older data start time is in TSTOP
+        # in the beginning TSTOP was empty
+        runstop=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep DATE-END | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+        runstop2=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep TSTOP | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+        if [ "$runstop" == ""  ]
+        then
+           if [ "$runstop2" == ""  ]
+           then
+             runstop=`stat $currentEntry  2>/dev/null | grep Modify | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9][ ][0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{9}'`
+           else
+              runstop=$runstop2
+           fi
+        fi
+
+      # set runtype to 'unknown' if no runtype could be queried
+      if [ "$runtype" == "" ]
+      then
+         runtype="n/a"
+      fi
+      # on 15.11.2011 the runtypes had different names
+      if [ "$date" == "2011/11/15" ]
+      then
+         if [ "$runtype" == "drs-calib" ]
+         then
+            runtype="drs-gain"
+         fi
+         if [ "$runtype" == "drs-time-calib" ]
+         then
+            runtype="drs-time"
+         fi
+         if [ "$runtype" == "pedestal" ]
+         then
+            runtype="drs-pedestal"
+         fi
+         if [ "$runtype" == "light-pulser" ]
+         then
+            runtype="light-pulser-ext"
+         fi
+         if [ "$runtype" == "pedestal-on" ]
+         then
+            runtype="pedestal"
+         fi
+      fi
+	#now take care of the slow control data.
+	#first get the start and stop time of the raw file
+
+      mjdref=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`/opt/FACT++/fitsdump -h -t Events $currentEntry  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      doNotDoThisEntry="false"
+      if [ "$tstarti" == "0" ] || [ "$tstopi" == "0" ] || [ "$mjdref" == "0" ]
+      then
+         echo "ERROR: "$currentEntry" has tstart, tstop or mjdref = 0"
+      fi
+	
+      if [ "$tstartf" == "" ]
+      then
+      	doNotDoThisEntry="true"
+	echo $currentEntry >> problemWithTSTART.txt
+	echo "WARNING: "$currentEntry" Has problems with Dates"
+      fi
+      if [ "$tstarti" == "" ]
+      then
+      		tstarti="0"
+      fi
+      if [ "$tstopi" == "" ]
+      then
+      		tstopi="0"
+      fi
+      if [ "$mjdref" == "" ]
+      then
+      		mjdref="0"
+	fi
+      if [ "$tstartf" == "" ]
+      then
+      		tstartf="0"
+      fi
+      if [ "$tstopf" == "" ]
+      then
+		tstopf="0"
+      fi
+
+      tstart=`echo " $tstarti + $mjdref + $tstartf " | bc -l`
+      tstop=`echo " $tstopi + $mjdref + $tstopf " | bc -l`
+      exposure=`echo "$tstop - $tstart " | bc -l`
+      exposure=`echo "$exposure * 86400" | bc -l `
+
+      #now get relevant data from daily files
+      #first get mjdref for the aux file and adapt start time accordingly
+      auxmjdref=`/opt/FACT++/fitsdump -h -t $trackingTable $trackingFile 2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      if [ "$auxmjdref" == "" ]
+      then
+      	echo "ERROR: "$trackingFile" has no mjdref available. Aborting script"
+	exit
+      fi
+
+      tstartaux=`echo " $tstart - $auxmjdref " | bc -l` 
+      tstopaux=`echo " $tstop - $auxmjdref " | bc -l`    
+      ftcopy $trackingFile'[Time> '${tstartaux}' && Time< '${tstopaux}'][col Ra;Dec;Zd;Az;Time]' history=NO !$tempFits
+      /opt/FACT++/fitsdump $tempFits -c Ra --stat -o $tempFile 2>/dev/null
+      ramin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      ramax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      ramean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      if [ "$ramin" != "" ]
+      then
+	      ramin=`echo "$ramin * 15" | bc -l`
+      		ramax=`echo "$ramax * 15" | bc -l`
+      		ramean=`echo "$ramean * 15" | bc -l`
+	fi
+      /opt/FACT++/fitsdump $tempFits -c Dec --stat -o $tempFile 2>/dev/null
+      decmin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      decmax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      decmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      /opt/FACT++/fitsdump $tempFits -c Zd --stat -o $tempFile 2>/dev/null
+      zdmin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      zdmax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      zdmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      /opt/FACT++/fitsdump $tempFits -c Az --stat -o $tempFile 2>/dev/null
+      azmin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      azmax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      azmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+
+      auxmjdref=`/opt/FACT++/fitsdump -h -t $triggerTable $triggerFile 2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      if [ "$auxmjdref" == "" ]
+      then
+      	echo "ERROR: "$triggerFile" has no mjdref available. Aborting script"
+	exit
+      fi
+      tstartaux=`echo " $tstart - $auxmjdref " | bc -l` 
+      tstopaux=`echo " $tstop - $auxmjdref " | bc -l`        
+      ftcopy $triggerFile'[Time> '${tstartaux}' && Time< '${tstopaux}'][col TriggerRate;Time]' history=NO !$tempFits
+      /opt/FACT++/fitsdump $tempFits -c TriggerRate --stat -o $tempFile 2>/dev/null
+      ratemin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      ratemax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      ratemean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+      ratemedian=`grep 'Med' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'` 
+
+      auxmjdref=`/opt/FACT++/fitsdump -h -t $staticTable $staticFile 2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      if [ "$auxmjdref" == "" ]
+      then
+      	echo "ERROR: "$staticFile" has no mjdref available. Aborting script"
+	exit
+      fi
+      iter=0
+      timeShift=0
+      threshmin=""
+      while [ "$threshmin" == "" ] && [ "$iter" != "10" ]
+      do
+         tstartaux=`echo " $tstart - $auxmjdref - $timeShift " | bc -l` 
+         tstopaux=`echo " $tstop - $auxmjdref " | bc -l`       
+	 ftcopy $staticFile'[Time> '${tstartaux}' && Time< '${tstopaux}'][col PatchThresh;Time]' history=NO !$tempFits 
+         /opt/FACT++/fitsdump $tempFits -c PatchThresh --stat -o $tempFile 2>/dev/null
+	 threshmin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         threshmax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         threshmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         threshmedian=`grep 'Med' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'` 
+
+	 timeShift=`echo " $timeShift + 0.00011574 " | bc -l` 
+	 iter=`echo " $iter + 1 " | bc -l`
+      done
+
+      #########################################################################################################################################################
+      auxmjdref=`/opt/FACT++/fitsdump -h -t $voltagesTable $voltagesFile 2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      if [ "$auxmjdref" == "" ]
+      then
+      	echo "ERROR: "$voltagesFile" has no mjdref available. Aborting script"
+	exit
+      fi
+      iter=0
+      timeShift=0
+      biasvoltmin=""
+      while [ "$biasvoltmin" == "" ] && [ "$iter" != "10" ]
+      do
+         tstartaux=`echo " $tstart - $auxmjdref " | bc -l` 
+         tstopaux=`echo " $tstop - $auxmjdref " | bc -l`   
+         ftcopy $voltagesFile'[Time> '${tstartaux}' && Time< '${tstopaux}'][col U;Time]' !$tempFits
+         /opt/FACT++/fitsdump $tempFits -c U[0:319] --stat -o $tempFile 2>/dev/null
+         biasvoltmin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         biasvoltmax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         biasvoltmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         biasvoltmedian=`grep 'Med' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'` 
+	 timeShift=`echo " $timeShift + 0.00011574 " | bc -l`
+ 	 iter=`echo " $iter + 1 " | bc -l`
+
+      done
+
+      auxmjdref=`/opt/FACT++/fitsdump -h -t $currentsTable $currentFile 2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      if [ "$auxmjdref" == "" ]
+      then
+      	echo "ERROR: "$currentFile" has no mjdref available. Aborting script"
+	exit
+      fi
+      iter=0
+      timeShift=0
+      biascurrentmin=""
+      while [ "$biascurrentmin" == "" ] && [ "$iter" != "10" ]
+      do
+         tstartaux=`echo " $tstart - $auxmjdref " | bc -l` 
+         tstopaux=`echo " $tstop - $auxmjdref " | bc -l`       
+         ftcopy $currentFile'[Time> '${tstartaux}' && Time< '${tstopaux}'][col I;Time]' !$tempFits
+         /opt/FACT++/fitsdump $tempFits -c I[0:319] --stat -o $tempFile 2>/dev/null
+         biascurrentmin=`grep 'Min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         biascurrentmax=`grep 'Max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         biascurrentmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+         biascurrentmedian=`grep 'Med' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'` 
+	 timeShift=`echo " $timeShift + 0.00011574 " | bc -l`
+ 	 iter=`echo " $iter + 1 " | bc -l`
+
+#echo "biascurrent min, max. mean, median: "$biascurrentmin" "$biascurrentmax" "$biascurrentmean" "$biascurrentmedian
+      done
+
+      #write the retrieved data to the input file for creating fits afterwards
+      echo "RUNTYPE "$runtype" / run type"  >> $keywordsFile	
+      echo "ROI "$roi" / region of interest" >> $keywordsFile
+      echo "ROITM "$roiTM" / Roi for time markers" >> $keywordsFile
+      echo "NUMEVENT "$numevents" / number of events" >> $keywordsFile
+      echo "NUMPHYS "$numphys" / num of physical triggers" >> $keywordsFile
+      echo "NUMEXT1 "$numext1" / num of Light pulser 1 triggers" >> $keywordsFile
+      echo "NUMEXT2 "$numext2" / num of Light pulser 2 triggers" >> $keywordsFile
+      echo "NUMELP "$numelp" / num of external Light pulser triggers" >> $keywordsFile
+      echo "NUMILP "$numilp" / num of internal Light pulser triggers" >> $keywordsFile
+      echo "NUMOTH "$numoth" / num of other triggers" >> $keywordsFile
+      echo "NUMPED "$numped" / num of pedestal triggers" >> $keywordsFile
+      echo "NUMTIME "$numtime" / num of time calibration triggers" >> $keywordsFile
+      echo "DATEOBS "$runstart" / start of observation" >> $keywordsFile
+      echo "MJDREF "$mjdref" / reference of MjD values" >> $keywordsFile
+      echo "EXPOSURE "$exposure" /duration of run" >> $keywordsFile
+      echo "TSTARTI "$tstarti" / first event arrival time (int)" >> $keywordsFile
+      echo "TSTARTF "$tstartf" / first event arrival time (frac)" >> $keywordsFile
+      echo "TSTOPI "$tstopi" / last event arrival time (int)" >> $keywordsFile
+      echo "TSTOPF "$tstopf" / last event arrival time (frac)" >> $keywordsFile
+      echo "RAMIN "$ramin" / min value of right ascension" >> $keywordsFile
+      echo "RAMAX "$ramax" / max value of right ascension" >> $keywordsFile
+      echo "RAMEAN "$ramean" / mean value of right ascension" >> $keywordsFile
+      echo "DECMIN "$decmin" / min value of declination" >> $keywordsFile
+      echo "DECMAX "$decmax" / max value of declination" >> $keywordsFile
+      echo "DECMEAN "$decmean" / mean value of declination" >> $keywordsFile
+      echo "ZDMIN "$zdmin" / min value of zenith distance" >> $keywordsFile
+      echo "ZDMAX "$zdmax" / max value of zenith distance" >> $keywordsFile
+      echo "ZDMEAN "$zdmean" / mean value of zenith distance" >> $keywordsFile
+      echo "AZMIN "$azmin" / min value of azimuth" >> $keywordsFile
+      echo "AZMAX "$azmax" / max value of azimuth" >> $keywordsFile
+      echo "AZMEAN "$azmean" / mean value of azimuth" >> $keywordsFile
+      echo "RATEMIN "$ratemin" / min value of trigger rates" >> $keywordsFile
+      echo "RATEMAX "$ratemax" / max value of trigger rates" >> $keywordsFile
+      echo "RATEMEAN "$ratemean" / mean value of trigger rates" >> $keywordsFile
+      echo "RATEMED "$ratemedian" / median value of trigger rates" >> $keywordsFile
+      echo "THRESMIN "$threshmin" / min threshold value" >> $keywordsFile
+      echo "THRESMAX "$threshmax" / max threshold value" >> $keywordsFile
+      echo "THRESMEA "$threshmean" / mean threshold value" >> $keywordsFile
+      echo "THRESMED "$threshmedian" / median threshold value" >> $keywordsFile
+      echo "BIASVMIN "$biasvoltmin" / min bias voltage (V)" >> $keywordsFile
+      echo "BIASVMAX "$biasvoltmax" / max bias voltage (V)" >> $keywordsFile
+      echo "BIASVMEA "$biasvoltmean" / mean bias voltage (V)" >> $keywordsFile
+      echo "BIASVMED "$biasvoltmedian" / median bias voltage (V)" >> $keywordsFile
+      echo "BIASAMIN "$biascurrentmin" / min bias current (uA)" >> $keywordsFile
+      echo "BIASAMAX "$biascurrentmax" / max bias current (uA)" >> $keywordsFile
+      echo "BIASAMEA "$biascurrentmean" / mean bias current (uA)" >> $keywordsFile
+      echo "BIASAMED "$biascurrentmedian" / median bias current (uA)" >> $keywordsFile
+
+      
+      echo "EXTNAME GROUPING / grouping table" >> $keywordsFile
+      echo "GRPNAME FACT-RAW / name of group" >> $keywordsFile
+      echo "RUNNUM "$runnumber" / run number" >> $keywordsFile
+      echo "RUNID "$dateRaw"_"$runnumber" / Run Id" >> $keywordsFile
+
+}
+
+#today=`date +%F`
+#alias alsoToLog='tee -a $logfile 2>&1'
+
+#echo "" | tee -a $logfile 2>&1
+#echo "" | tee -a $logfile 2>&1
+#echo "" | tee -a $logfile 2>&1
+
+#cleanup logfile
+rm -f $logfile
+
+#echo `date`" executing "$0 | tee -a $logfile 2>&1
+
+
+
+#first of all, let's make sure that required slow control files are present
+if ! [ -e $trackingFile ]
+then
+    echo "ERROR: Required aux file "$trackingFile" not found. Aborting script" | tee -a $logfile 2>&1
+    exit
+fi
+if ! [ -e $triggerFile ]
+then
+    echo "ERROR: Required aux file "$triggerFile" not found. Aborting script" | tee -a $logfile 2>&1
+    exit
+fi
+if ! [ -e $staticFile ]
+then
+    echo "ERROR: Required aux file "$staticFile" not found. Aborting script" | tee -a $logfile 2>&1
+    exit
+fi
+if ! [ -e $voltagesFile ]
+then
+    echo "ERROR: Required aux file "$voltagesFile" not found. Aborting script" | tee -a $logfile 2>&1
+    exit
+fi
+if ! [ -e $currentFile ]
+then
+    echo "ERROR: Required aux file "$currentFile" not found. Aborting script" | tee -a $logfile 2>&1
+    exit
+fi
+
+#next define the format of the grouping file
+
+rm -f $colDescFile
+echo "MEMBER_NAME 60A"      >> $colDescFile
+echo "MEMBER_XTENSION 8A"   >> $colDescFile 
+echo "MEMBER_URI_TYPE 3A"   >> $colDescFile
+echo "MEMBER_LOCATION 256A" >> $colDescFile
+echo "MEMBER_VERSION 1J"    >> $colDescFile
+echo "MEMBER_POSITION 1J"   >> $colDescFile
+echo "DATA_TYPE 60A"	    >> $colDescFile
+
+entries=`find $rawFolder -type f -name '*.fits.gz' | sort`
+calibs=""
+calibFiles=""
+calibDrsFiles=""
+calibDrsPedestalFiles=""
+calibsRoi=""
+calibsRoiTM=""
+
+#data=""
+#correspondingCalib=""
+numCalibs=0
+#numData=0
+
+if [ "$year" == "" ]
+then
+	echo "Missing year argument"
+	exit
+fi
+
+if [ "$month" == "" ]
+then
+	echo "Missing month argument"
+	exit
+fi
+
+if [ "$day" == "" ]
+then
+	echo "Missing day argument"
+	exit
+fi
+
+numEntries=0
+for item in ${entries[@]}
+do
+    numEntries=`expr $numEntries + 1`
+done
+
+echo "There are "$numEntries" entries to examine in folder "$rawFolder | tee -a $logfile 2>&1
+
+#echo "Will now erase entries generated previously..."
+#toDelete=`find $targetFolder -type f -name '*_raw.fits' | sort`
+#for delete in ${toDelete[@]}
+#do
+#	echo "removing "$delete
+#	rm $delete
+#done
+#toDelete=`find $targetFolder -type f -name '*_raw.txt' | sort`
+#for delete in ${toDelete[@]}
+#do
+#	echo "removing $delete"
+#	rm $delete
+#done
+#echo "done"
+
+for entry in ${entries[@]}
+do
+	filename=`echo $entry | grep -E -o '20[1-9][0-9][01][0-9][0-3][0-9]_[0-9]{3}'`
+        filedate=`echo $filename | cut -d_ -f1`
+        runnumberInter=`echo $filename | cut -d_ -f2 | sed -e 's/^0//g' -e 's/^0//g'`
+        runnumber=`printf %03d $runnumberInter`
+        #figure out whether this is a drs calib
+	if [ "`echo $entry | grep drs`" != "" ]
+        then
+		#check if this is the pedestal run. for this, grep the associated raw data file and check for previous calibrations (drs pedestal and drs gain)
+		minusOne=`expr $runnumber - 1`
+		minusOne=`printf %03d $minusOne`
+		minusTwo=`expr $runnumber - 2`
+		minusTwo=`printf %03d $minusTwo`
+		pedestalRaw=$rawFolder/$filename.fits.gz
+		DrsGainRaw=$rawFolder/$filedate"_"$minusOne.fits.gz
+		DrsPedestalRaw=$rawFolder/$filedate"_"$minusTwo.fits.gz
+		DrsGain=$rawFolder/$filedate"_"$minusOne.drs.fits.gz
+		DrsPedestal=$rawFolder/$filedate"_"$minusTwo.drs.fits.gz
+		pedestalkey=""
+		drsGainKey=""
+		drsPedestalKey=""
+		if [ -f $pedestalRaw ]
+		then
+			pedestalKey=`/opt/FACT++/fitsdump -h $pedestalRaw 2>/dev/null | grep "'pedestal'"`
+		fi
+		if [ "$pedestalKey" == "" ]
+		then #the current drs file is NOT a pedestal. Continue
+			continue
+		fi
+#		echo "Found Pedestal entry "$entry
+		if [ -f $DrsGainRaw ] && [ -f $DrsGain ]
+		then
+			drsGainKey=`/opt/FACT++/fitsdump -h $DrsGainRaw 2>/dev/null | grep "'drs-gain'"`
+		else
+			DrsGain=""
+		fi
+		if [ -f $DrsPedestalRaw ] && [ -f $DrsPedestal ]
+		then
+			drsPedestalKey=`/opt/FACT++/fitsdump -h $DrsPedestalRaw 2>/dev/null | grep "'drs-pedestal'"`
+		else
+			DrsPedestal=""
+		fi
+		
+		if [ "$drsPedestalKey" != "" ]
+		then
+			calibtstarti=`/opt/FACT++/fitsdump -h $DrsPedestalRaw  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+		        calibtstartf=`/opt/FACT++/fitsdump -h $DrsPedestalRaw  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+		else
+			if [ "$drsGainKey" != "" ]
+			then
+				calibtstarti=`/opt/FACT++/fitsdump -h $DrsGainRaw  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+			        calibtstartf=`/opt/FACT++/fitsdump -h $DrsGainRaw  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+			else
+				calibtstarti=`/opt/FACT++/fitsdump -h $pedestalRaw  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+		        	calibtstartf=`/opt/FACT++/fitsdump -h $pedestalRaw  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+			fi
+		fi
+		calibtstopi=`/opt/FACT++/fitsdump -h $pedestalRaw  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      		calibtstopf=`/opt/FACT++/fitsdump -h $pedestalRaw  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+
+		auxstarti=`/opt/FACT++/fitsdump -h $voltagesFile 2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+		auxstartf=`/opt/FACT++/fitsdump -h $voltagesFile 2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+		
+		if [ "$auxstarti" == "" ]
+		then
+			auxstarti="0"
+		fi
+		if [ "$auxstartf" == "" ]
+		then
+			auxstartf="0"
+		fi
+		if [ "$calibtstarti" == "" ]
+		then
+			calibtstarti="0"
+		fi
+		if [ "$calibtstartf" == "" ]
+		then
+			calibtstartf="0"
+		fi
+		if [ "$calibtstopi" == "" ]
+		then
+			calibtstopi="0"
+		fi
+		if [ "$calibtstopf" == "" ]
+		then
+			calibtstopf="0"
+		fi
+		
+		auxstart=`echo " $auxstarti + 40587 + $auxstartf " | bc -l`
+		calibtstart=`echo " $calibtstarti + 40587 + $calibtstartf " | bc -l`
+		calibtstop=`echo " $calibtstopi + 40587 + $calibtstopf " | bc -l`
+		iter=0
+      		timeShift=0
+		biasvoltmean=""
+         	tstartaux=`echo "  $calibtstart - 40587 - $timeShift" | bc -l` 
+		compare=`echo " $tstartaux >= $auxstart " | bc -l`
+
+      		while [ "$biasvoltmean" == "" ] && [ "$iter" != "10" ] && [ "$compare" == "1" ]
+      		do
+         		tstartaux=`echo "  $calibtstart - 40587 - $timeShift" | bc -l`
+			compare=`echo " $tstartaux >= $auxstart " | bc -l` 
+         		tstopaux=`echo " $calibtstop - 40587 " | bc -l`   
+         		ftcopy $voltagesFile'[Time> '${tstartaux}' && Time< '${tstopaux}'][col U;Time]' !$tempFits
+         		/opt/FACT++/fitsdump $tempFits -c U[0:319] --stat -o $tempFile 2>/dev/null
+         		biasvoltmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	 		timeShift=`echo " $timeShift + 0.000011574 " | bc -l`
+ 	 		iter=`echo " $iter + 1 " | bc -l`
+		done
+		auxstarti=`/opt/FACT++/fitsdump -h $currentFile 2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+		auxstartf=`/opt/FACT++/fitsdump -h $currentFile 2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+		auxstart=`echo " $auxstarti + $auxstartf " | bc -l`
+		iter=0
+		timeShift=0
+		biascurrentmean=""
+		tstartaux=`echo " $calibtstart - 40587 - $timeShift" | bc -l`
+		compare=`echo " $tstartaux >= $auxstart " | bc -l`
+		while [ "$biascurrentmean" == "" ] && [ "$iter" != "10" ] && [ "$compare" == "1" ]
+		do
+			tstartaux=`echo " $calibtstart - 40587 - $timeShift " | bc -l`
+			compare=`echo " $tstartaux >= $auxstart " | bc -l` 
+        		tstopaux=`echo " $calibtstop - 40587 " | bc -l`       
+        		ftcopy $currentFile'[Time> '${tstartaux}' && Time< '${tstopaux}'][col I;Time]' !$tempFits
+			/opt/FACT++/fitsdump $tempFits -c I[0:319] --stat -o $tempFile 2>/dev/null
+         		biascurrentmean=`grep 'Mea' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'` 
+			timeShift=`echo " $timeShift + 0.000011574 " | bc -l`
+ 			iter=`echo " $iter + 1 " | bc -l`
+		done
+
+		if [ "$biasvoltmean" == "" ]
+		then
+		     	biasvoltmean=0
+		fi
+#		echo "Bias voltage: "$biasvoltmean
+		if [ "$biascurrentmean" == "" ]
+		then
+		     	biascurrentmean=0
+		fi
+		compare=`echo " $biascurrentmean < 5 " | bc -l`
+		if [ "$pedestalKey" != "" ] && [ "$drsGainKey" != "" ] && [ "$drsPedestalKey" != "" ] && [ "$biasvoltmean" == "0" ] && [ "$compare" == "1" ]
+		then
+			calibFiles[$numCalibs]=$entry
+	        	calibs[$numCalibs]=$runnumber
+			calibDrsFiles[$numCalibs]=$DrsGain
+			calibDrsPedestalFiles[$numCalibs]=$DrsPedestal
+			calibsRoi[$numCalibs]=`/opt/FACT++/fitsdump -h -t DrsCalibration $entry  2>/dev/null | grep NROI | grep -v NROITM | grep -E -o '[0-9]{1,4}'`
+			calibsRoiTM[$numCalibs]=`/opt/FACT++/fitsdump -h -t DrsCalibration $entry  2>/dev/null | grep NTM | grep -E -o '[0-9]{1,4}'`
+				#echo "Calib file #"$runnumber" found. Roi=${calibsRoi[$numCalibs]} RoiTM="${calibsRoiTM[$numCalibs]}
+	                numCalibs=`expr $numCalibs + 1`
+		fi
+	       	continue
+        fi
+
+       #if not calib, then look if this is a calibration run (i.e. run that created the drs calib)	
+	correspondingDrs=$rawFolder/$filename.drs.fits.gz
+	if [ -f $correspondingDrs ]
+	then
+		continue
+	fi
+       #if regular run, add its run number, and figure out a proper calibration file
+        data[$numData]=$runnumber
+	calibFound="false"
+	partialCalibFound="false"
+	calibIndex=`expr $numCalibs - 1`
+
+	#if there is no calib available, report error
+	if [ "$numCalibs" == "0" ]
+	then
+		calibFound="notFound"
+	fi
+	while [ "$calibFound" == "false" ]
+	do
+		roi=`/opt/FACT++/fitsdump -h -t Events $entry  2>/dev/null | grep NROI | grep -v NROITM | grep -E -o '[0-9]{1,4}'`
+         	roiTM=`/opt/FACT++/fitsdump -h -t Events $entry  2>/dev/null | grep NROITM | grep -E -o '[0-9]{1,4}'`
+		if [ "$roi" == "${calibsRoi[$calibIndex]}" ] && [ "$roiTM" == "${calibsRoiTM[$calibIndex]}" ]
+		then
+			if [ "${calibDrsFiles[$calibIndex]}" != "" ] && [ "${calibDrsPedestalFiles[$calibIndex]}" != "" ]
+			then
+				calibFound="true"
+				break
+			else
+				if [ "$partialCalibFound" == "false" ]
+				then
+					partialCalibFound=$calibIndex
+				fi
+			fi
+		fi
+		calibIndex=`expr $calibIndex - 1`
+		if [ "$calibIndex" == "-1" ]
+		then
+		  	calibFound="notFound"
+		fi
+	done
+	
+	if [ "$partialCalibFound" != "false" ] && [ "$calibFound" != "true" ]
+	then
+		calibFound=""
+		calibIndex=$partialCalibFound
+	fi
+	
+	if [ "$calibFound" == "true" ]
+	then
+		calibFound="complete"
+	fi
+	
+        if [ "$calibFound" == "notFound" ]
+        then
+		echo "No suitable calibration file could be found for run $runnumber" | tee -a $logfile 2>&1
+		calibFileString="NULL"
+		calibDrsString="NULL"
+		calibDrsPedestalString="NULL"
+	else
+		echo "Found $calibFound calibration "${calibs[$calibIndex]}" for run "$runnumber" with Roi="$roi" and roiTM="$roiTM | tee -a $logfile 2>&1
+		calibFileString=${calibFiles[$calibIndex]}
+		calibDrsString=${calibDrsFiles[$calibIndex]}
+		calibDrsPedestalString=${calibDrsPedestalFiles[$calibIndex]}
+		if [ "$calibDrsString" == "" ]
+		then
+			calibDrsString="NULL"
+		fi
+		if [ "$calibDrsPedestalString" == "" ]
+		then
+			calibDrsPedestalString="NULL"
+		fi
+	fi
+	
+	#File is valid. Get its related information
+	currentEntry=$entry
+	rm -f $keywordsFile
+	rm -f $dataFile
+	writeHeaderKeysToInputFile
+	if [ "$doNotDoThisEntry" == "true" ]
+	then
+		continue
+	fi
+	#Header keys written for raw data. do the same for related aux data
+	echo "Events BINTABLE URL ../../../"$entry" 1 1 Events" >> $dataFile
+	if [ "$calibFileString" != "NULL" ]
+	then
+		echo "DrsCalibration BINTABLE URL ../../../"$calibFileString" 1 1 Pedestal" >> $dataFile
+	fi
+	if [ "$calibDrsString" != "NULL" ]
+	then
+		echo "DrsCalibration BINTABLE URL ../../../"$calibDrsString" 1 1 Drs_Gain" >> $dataFile
+	fi
+	if [ "$calibDrsPedestalString" != "NULL" ]
+	then
+		echo "DrsCalibration BINTABLE URL ../../../"$calibDrsPedestalString" 1 1 Drs_Pedestal" >> $dataFile
+	fi
+	echo "DRIVE_CONTROL_TRACKING_POSITION BINTABLE URL ../../../"$trackingFile" 1 1 Tracking_Position" >> $dataFile
+	echo "FTM_CONTROL_TRIGGER_RATE BINTABLE URL ../../../"$triggerFile" 1 1 Trigger_Rate" >> $dataFile
+	echo "FTM_CONTROL_STATIC_DATA BINTABLE URL ../../../"$staticFile" 1 1 Thresholds" >> $dataFile
+	echo "BIAS_CONTROL_VOLTAGE BINTABLE URL ../../../"$voltagesFile" 1 1 Voltages" >> $dataFile
+	echo "BIAS_CONTROL_CURRENT BINTABLE URL ../../../"$currentFile" 1 1 Currents" >> $dataFile
+		
+	if [ "$calibFound" != "notFound" ]
+	then	
+		#write info to Werner's file
+		if [ "$runtype" == "custom" ]
+		then
+			runtype="custom____________"
+		fi
+		if [ "$runtype" == "data" ]
+		then
+			runtype="data______________"
+		fi
+		if [ "$runtype" == "drs-gain" ]
+		then
+			runtype="drs-gain__________"
+		fi
+		if [ "$runtype" == "drs-gain-ext" ]
+		then
+			runtype="drs-gain-ext______"
+		fi
+		if [ "$runtype" == "drs-pedestal" ]
+		then
+			runtype="drs-pedestal______"
+		fi
+		if [ "$runtype" == "drs-pedestal-ext" ]
+		then
+			runtype="drs-pedestal-ext__"
+		fi
+		if [ "$runtype" == "drs-time" ]
+		then
+			runtype="drs-time__________"
+		fi
+		if [ "$runtype" == "drs-time-delay15" ]
+		then
+			runtype="drs-time-delay15__"
+		fi
+		if [ "$runtype" == "drs-time-delay05" ]
+		then
+			runtype="drs-time-delay05__"
+		fi
+		if [ "$runtype" == "drs-time-delay20" ]
+		then
+			runtype="drs-time-delay20__"
+		fi
+		if [ "$runtype" == "drs-time-upshifted" ]
+		then
+			runtype="drs-time-upshifted"
+		fi
+		if [ "$runtype" == "light-pulser-ext" ]
+		then
+			runtype="light-pulser-ext__"
+		fi
+		if [ "$runtype" == "n/a" ]
+		then
+			runtype="n/a_______________"
+		fi
+		if [ "$runtype" == "ped-and-lp-ext" ]
+		then
+			runtype="ped-and-lp-ex_____"
+		fi
+		if [ "$runtype" == "pedestal" ]
+		then
+			runtype="pedestal__________"
+		fi
+		targetWerner=$targetFolder"/"$year"/"$month
+		if [ ! -d $targetWerner ]
+		then
+			mkdir -p $targetWerner
+		fi
+		wernerFile=$targetWerner"/"$year$month"_001.txt"
+		if [ ! -f $wernerFile ]
+		then
+			echo "creating "$wernerFile
+			echo "#	DRS	DAT	DD	MM	YYYY	TYPE	" >> $wernerFile
+		fi
+		echo "	"${calibs[$calibIndex]}"	"$runnumber"	"$day"	"$month"	"$year"	"$runtype >> $wernerFile
+	fi
+	
+	#now add the "other" slow control files, i.e. the ones that are not mandatory for analysis	
+	auxTable="BIAS_CONTROL_NOMINAL"
+	auxDesc="Bias_Control"
+	writeExtraAuxFile
+	auxTable="BIAS_CONTROL_STATE"
+	auxDesc="Bias_State"	
+	writeExtraAuxFile
+	auxTable="DATA_LOGGER_FILENAME_NIGHTLY"
+	auxDesc="Logger_Filename_Night"
+	writeExtraAuxFile
+	auxTable="DATA_LOGGER_FILENAME_RUN"
+	auxDesc="Logger_Filename_Run"
+	writeExtraAuxFile
+	auxTable="DATA_LOGGER_NUM_SUBS"
+	auxDesc="Logger_num_subs"
+	writeExtraAuxFile
+	auxTable="DATA_LOGGER_STATE"
+	auxDesc="Logger_State"
+	writeExtraAuxFile
+	auxTable="DATA_LOGGER_STATS"
+	auxDesc="Logger_Statistics"
+	writeExtraAuxFile
+	auxTable="DRIVE_CONTROL_POINTING_POSITION"
+	auxDesc="Pointing_Position"
+	writeExtraAuxFile
+	auxTable="DRIVE_CONTROL_STATE"
+	auxDesc="Drive_State"
+	writeExtraAuxFile
+	auxTable="DRIVE_CONTROL_STATUS"
+	auxDesc="Drive_Status"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_DAC"
+	auxDesc="FAD_DAC"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_DNA"
+	auxDesc="FAD_DNA"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_DRS_CALIBRATION"
+	auxDesc="FAD_Drs_Calibration"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_EVENTS"
+	auxDesc="FAD_Events"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_FEEDBACK_DATA"
+	auxDesc="FAD_Feedback_Data"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_FILE_FORMAT"
+	auxDesc="FAD_File_Format"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_FIRMWARE_VERSION"
+	auxDesc="FAD_Firmware_Version"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_PRESCALER"
+	auxDesc="FAD_Prescaler"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_REFERENCE_CLOCK"
+	auxDesc="FAD_Reference_Clock"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_REGION_OF_INTEREST"
+	auxDesc="FAD_ROI"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_RUN_NUMBER"
+	auxDesc="FAD_Run_Number"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_RUNS"
+	auxDesc="FAD_Runs"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_START_RUN"
+	auxDesc="FAD_Start_Run"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_STATE"
+	auxDesc="FAD_State"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_STATISTICS1"
+	auxDesc="FAD_Statistics_1"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_STATISTICS2"
+	auxDesc="FAD_Statistics_2"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_STATS"
+	auxDesc="FAD_Stats"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_STATUS"
+	auxDesc="FAD_Status"
+	writeExtraAuxFile
+	auxTable="FAD_CONTROL_TEMPERATURE"
+	auxDesc="FAD_Temperatures"
+	writeExtraAuxFile
+	auxTable="FEEDBACK_DEVIATION"
+	auxDesc="Feedback_Deviation"
+	writeExtraAuxFile
+	auxTable="FEEDBACK_STATE"
+	auxDesc="Feedback_State"
+	writeExtraAuxFile
+	auxTable="FSC_CONTROL_HUMIDITY"
+	auxDesc="FSC_Humidity"
+	writeExtraAuxFile
+	auxTable="FSC_CONTROL_STATE"
+	auxDesc="FSC_State"
+	writeExtraAuxFile
+	auxTable="FSC_CONTROL_TEMPERATURE"
+	auxDesc="FSC_Temperature"
+	writeExtraAuxFile
+	auxTable="FTM_CONTROL_COUNTER"
+	auxDesc="FTM_Counter"
+	writeExtraAuxFile
+	auxTable="FTM_CONTROL_DYNAMIC_DATA"
+	auxDesc="FTM_Dynamic_Data"
+	writeExtraAuxFile
+	auxTable="FTM_CONTROL_FTU_LIST"
+	auxDesc="FTM_FTU_List"
+	writeExtraAuxFile
+	auxTable="FTM_CONTROL_PASSPORT"
+	auxDesc="FTM_Passeport"
+	writeExtraAuxFile
+	auxTable="FTM_CONTROL_STATE"
+	auxDesc="FTM_State"
+	writeExtraAuxFile
+	auxTable="MAGIC_WEATHER_DATA"
+	auxDesc="MAGIC_Weather_Data"
+	writeExtraAuxFile
+	auxTable="MAGIC_WEATHER_STATE"
+	auxDesc="MAGIC_Weather_State"
+	writeExtraAuxFile
+	auxTable="MCP_STATE"
+	auxDesc="MCP_State"
+	writeExtraAuxFile
+	auxTable="RATE_CONTROL_STATE"
+	auxDesc="Rate_Control_State"
+	writeExtraAuxFile
+	#create the fits file
+	targetNormal=$targetFolder"/"$year"/"$month"/"$day
+	if [ ! -d $targetNormal ]
+	then
+		mkdir -p $targetNormal
+	fi
+	targetFile=$targetNormal"/"$year$month$day"_"$runnumber"_001.fits"
+	if [ -f $targetFile ]
+	then
+		rm $targetFile
+	fi
+	fcreate $colDescFile $dataFile $targetFile "headfile="$keywordsFile
+		echo "Created "$targetFile | tee -a $logfile 2>&1
+
+done
Index: branches/trigger_burst_research/Archive/auxIngest.sh
===================================================================
--- branches/trigger_burst_research/Archive/auxIngest.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/auxIngest.sh	(revision 18288)
@@ -0,0 +1,140 @@
+#!/bin/bash
+
+sourceFolder=$1 
+destFolder=$2 
+suffix=$3
+
+if [ "$#" != "3" ]
+then
+	echo "Please specify source and dest folders and suffix. Aborting"
+	exit
+fi
+
+if [ $1 == "" ]
+then
+	echo "Source folder is empty. Aborting"
+	exit
+fi
+
+if [ $2 == "" ]
+then
+	echo "Dest folder is empty. Aborting"
+	exit
+fi
+
+#first let's make sure that source and dest folders do exist, and that dest is writable
+if [ -d $1 ]
+then
+	sourceFolder=$1
+else
+	echo "Source folder "$1" does not exist (or cannot be read.) Aborting"
+	exit
+fi
+
+if [ -d $2 ]
+then
+	if [ -d $2"/etiennetest" ]
+	then
+		echo "Test folder already exists. Aborting"
+		exit
+	fi
+	mkdir $2"/etiennetest" 2>/dev/null
+	if [ -d $2"/etiennetest" ]
+	then
+		rm -rf $2"/etiennetest"
+		destFolder=$2
+	else
+		echo "Dest folder is not writable. Aborting"
+		exit
+	fi
+else
+	echo "Dest folder does not exist. Aborting"
+	exit
+fi
+
+#files=`ls $destFolder`
+#if [ "$files" != "" ]
+#then
+#	echo "Dest folder is not empty. Aborting"
+#	exit
+#fi
+sourceFolder=${sourceFolder%/}
+destFolder=${destFolder%/}
+echo "Will start ingesting files from "$sourceFolder" to "$destFolder
+
+#list all the files in sourceFolder, and copy them with the same structure to destfolder
+
+entries=`find $sourceFolder -type f -name '*.fits' | sort`
+
+for entry in ${entries[@]}
+do
+	#first construct the correct file name
+	targetFileName=`correctFileName $entry`
+       #second construct the destination path.
+	filenameonly=${entry##*/}
+	pathonly=${entry%$filenameonly}
+	extrapathonly=${pathonly#$sourceFolder/}
+	targetFolder=$destFolder"/"$extrapathonly
+	if [ ! -d $targetFolder ]
+	then
+		mkdir -p $targetFolder
+	fi
+	
+	#check if the file already exists there
+	targetFile=$targetFolder"/"$targetFileName
+	echo "$targetFile"
+	if [ -a $targetFile ]
+	then
+		echo "File $targetFile already exists. Skipping it" >> report_$suffix.txt
+		continue
+	fi
+	cp $entry $targetFile
+	#if not, do the copying, fixing and checking
+
+#	grouping=`/home/isdc/lyard/FACT++/fitsdump $targetFile -h 2>/dev/null | grep GROUPING`
+	
+#	grouping=`grep 'GROUPING' "temp.txt"`
+		
+#	if [ "$grouping" == "" ]
+#	then
+		
+	repairAuxFile.sh $targetFile ENDerrors_$suffix.txt MJDerror_$suffix.txt report_$suffix.txt processErrors_$suffix.txt
+	
+	if [ -a $targetFile ]
+	then
+		fixAuxKeyWords.sh $targetFile reportTwo_$suffix.txt processErrors_$suffix.txt
+		result=`fverify $targetFile 2>/dev/null | grep '0 error(s)'`
+		if [ "$result" == "" ]
+		then
+			echo "$targetFile" >> stillHasProblems_$suffix.txt
+			rm $targetFile
+		fi
+	fi
+done
+
+#set the correct permissions
+find $destFolder -type f -exec chmod 640 {} \;
+find $destFolder -type d -exec chmod 750 {} \;
+find $destFolder -exec chgrp fact {} \;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: branches/trigger_burst_research/Archive/checkForHeaderUpdatesAux.sh
===================================================================
--- branches/trigger_burst_research/Archive/checkForHeaderUpdatesAux.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/checkForHeaderUpdatesAux.sh	(revision 18288)
@@ -0,0 +1,134 @@
+#!/bin/bash
+
+month=$1
+
+if [ "$month" == "" ]
+then
+	echo "Please give the month to look for as parameter"
+	exit
+fi
+
+archive="/archive/fact/rev_1/aux/2011/"$month
+tempFile="tempFile$month.txt"
+tempFile2="tempFile2$month.txt"
+entries=`find $archive -type f -name '*.fits' | sort`
+
+for entry in ${entries[@]}
+do
+	if [ -f $tempFile ]
+	then
+		rm $tempFile
+	fi
+	if [ -f $tempFile2 ]
+	then
+		rm $tempFile2
+	fi
+	
+	tstartf=1
+	tstartf2=2
+	
+	result=`/opt/FACT++/fitsdump -h $entry > $tempFile`
+	origEntry=`echo $entry | sed -e 's/archive\/fact\/rev_1/data00\/fact-construction/g'`
+	if [ ! -f $origEntry ]
+	then
+		echo $entry >> headerReportAux_$month.txt
+		echo "Name was changed" >> headerReportAux_$month.txt
+		continue
+	fi
+	result2=`/opt/FACT++/fitsdump -h $origEntry > $tempFile2`
+	
+	tstartf=`grep 'TSTARTF' $tempFile | grep -E -o '0\.[0-9]{7}' `
+	tstartf2=`grep 'TSTARTF' $tempFile2 | grep -E -o '0\.[0-9]{7}' `
+	if [ "$tstartf" == "" ]
+	then
+		tstartf=`grep 'TSTARTF' $tempFile | grep -E -o '0'`
+	fi
+	if [ "$tstartf2" == "" ]
+	then
+		tstartf2=`grep 'TSTARTF' $tempFile2 | grep -E -o '0'`
+	fi
+
+	tstarti=`grep 'TSTARTI' $tempFile | grep -E -o '[0-9]{5}' `
+	tstarti2=`grep 'TSTARTI' $tempFile2 | grep -E -o '[0-9]{5}' `
+	tstopf=`grep 'TSTOPF' $tempFile | grep -E -o '0\.[0-9]{7}'`
+	tstopf2=`grep 'TSTOPF' $tempFile2 | grep -E -o '0\.[0-9]{7}'`
+	if [ "$tstopf" == "" ]
+	then
+		tstopf=`grep 'TSTOPF' $tempFile | grep -E -o '0'`
+	fi
+	if [ "$tstopf2" == "" ]
+	then
+		tstopf2=`grep 'TSTOPF' $tempFile2 | grep -E -o '0'`	
+	fi
+	tstopi=`grep 'TSTOPI' $tempFile | grep -E -o '[0-9]{5}'`
+	tstopi2=`grep 'TSTOPI' $tempFile2 | grep -E -o '[0-9]{5}'`
+	dateobs=`grep 'DATE-OBS' $tempFile | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4}`
+	dateobs2=`grep 'DATE-OBS' $tempFile2 | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4}`
+	dateend=`grep 'DATE-END' $tempFile | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4} `
+	dateend2=`grep 'DATE-END' $tempFile2 | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4}` 
+	telescop=`grep 'TELESCOP' $tempFile`
+	telescop2=`grep 'TELESCOP' $tempFile2`
+	package=`grep 'PACKAGE' $tempFile`
+	package2=`grep 'PACKAGE' $tempFile2`
+	origin=`grep 'ORIGIN' $tempFile`
+	origin2=`grep 'ORIGIN' $tempFile2`
+	timeunit=`grep 'TIMEUNIT' $tempFile`
+	timeunit2=`grep 'TIMEUNIT' $tempFile2`
+	mjdref=`grep -E '## +MJDREF' $tempFile`
+	mjdref2=`grep -E '## +MJDREF' $tempFile2`
+	timesys=`grep 'TIMESYS' $tempFile`
+	timesys2=`grep 'TIMESYS' $tempFile2`
+	
+	echo $entry >> headerReportAux_$month.txt
+	echo $entry
+	
+	if [ "$tstarti" != "$tstarti2" ]
+	then
+		echo "TSTARTI. new: "$tstarti" old: "$tstarti2 >> headerReportAux_$month.txt
+	fi
+	if [ "$tstartf" != "$tstartf2" ]
+	then
+		echo "TSTARTF. new: "$tstartf" old: "$tstartf2 >> headerReportAux_$month.txt
+	fi
+	if [ "$tstopi" != "$tstopi2" ]
+	then
+		echo "TSTOPI. new: "$tstopi" old: "$tstopi2 >> headerReportAux_$month.txt
+	fi
+	if [ "$tstopf" != "$tstopf2" ]
+	then
+		echo "TSTOPF. new: "$tstopf" old: "$tstopf2 >> headerReportAux_$month.txt
+	fi
+	if [ "$dateobs" != "$dateobs2" ]
+	then
+		echo "DATEOBS. new: "$dateobs" old: "$dateobs2 >> headerReportAux_$month.txt
+	fi
+	if [ "$dateend" != "$dateend2" ]
+	then
+		echo "DATEEND. new: "$dateend" old: "$dateend2 >> headerReportAux_$month.txt
+	fi
+	if [ "$telescop" != "$telescop2" ]
+	then
+		echo "TELESCOP. new: "$telescop" old: "$telescop2 >> headerReportAux_$month.txt
+	fi
+	if [ "$package" != "$package2" ]
+	then
+		echo "PACKAGE. new: "$package" old: "$package2 >> headerReportAux_$month.txt
+	fi
+	if [ "$origin" != "$origin2" ]
+	then
+		echo "ORIGIN. new: "$origin" old: "$origin2 >> headerReportAux_$month.txt
+	fi
+	if [ "$timeunit" != "$timeunit2" ]
+	then
+		echo "TIMEUNIT. new: "$timeunit" old: "$timeunit2 >> headerReportAux_$month.txt
+	fi
+	if [ "$mjdref" != "$mjdref2" ]
+	then
+		echo "MJDREF. new: "$mjdref" old: "$mjdref2 >> headerReportAux_$month.txt
+	fi
+	if [ "$timesys" != "$timesys2" ]
+	then
+		echo "TIMESYS. new: "$timesys" old: "$timesys2 >> headerReportAux_$month.txt
+	fi
+done
+
Index: branches/trigger_burst_research/Archive/checkForHeaderUpdatesRaw.sh
===================================================================
--- branches/trigger_burst_research/Archive/checkForHeaderUpdatesRaw.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/checkForHeaderUpdatesRaw.sh	(revision 18288)
@@ -0,0 +1,139 @@
+#!/bin/bash
+
+month=$1
+
+if [ "$month" == "" ]
+then
+	echo "Please give the month to look for as parameter"
+	exit
+fi
+
+archive="/archive/fact/rev_1/raw/2011/"$month
+tempFile="tempFile$month.txt"
+tempFile2="tempFile2$month.txt"
+entries=`find $archive -type f -name '*.fits.gz' | sort`
+
+for entry in ${entries[@]}
+do
+	res=`echo $entry | grep 'drs'`
+	if [ "$res" != "" ]
+	then
+		continue
+	fi
+	if [ -f $tempFile ]
+	then
+		rm $tempFile
+	fi
+	if [ -f $tempFile2 ]
+	then
+		rm $tempFile2
+	fi
+	
+	tstartf=1
+	tstartf2=2
+	
+	result=`/opt/FACT++/fitsdump -h $entry > $tempFile`
+	origEntry=`echo $entry | sed -e 's/archive\/fact\/rev_1/data00\/fact-construction/g'`
+	if [ ! -f $origEntry ]
+	then
+		echo $entry >> headerReportRaw_$month.txt
+		echo "Name was changed" >> headerReportRaw_$month.txt
+		continue
+	fi
+	result2=`/opt/FACT++/fitsdump -h $origEntry > $tempFile2`
+	
+	tstartf=`grep 'TSTARTF' $tempFile | grep -E -o '0\.[0-9]{7}' `
+	tstartf2=`grep 'TSTARTF' $tempFile2 | grep -E -o '0\.[0-9]{7}' `
+	if [ "$tstartf" == "" ]
+	then
+		tstartf=`grep 'TSTARTF' $tempFile | grep -E -o '0'`
+	fi
+	if [ "$tstartf2" == "" ]
+	then
+		tstartf2=`grep 'TSTARTF' $tempFile2 | grep -E -o '0'`
+	fi
+
+	tstarti=`grep 'TSTARTI' $tempFile | grep -E -o '[0-9]{5}' `
+	tstarti2=`grep 'TSTARTI' $tempFile2 | grep -E -o '[0-9]{5}' `
+	tstopf=`grep 'TSTOPF' $tempFile | grep -E -o '0\.[0-9]{7}'`
+	tstopf2=`grep 'TSTOPF' $tempFile2 | grep -E -o '0\.[0-9]{7}'`
+	if [ "$tstopf" == "" ]
+	then
+		tstopf=`grep 'TSTOPF' $tempFile | grep -E -o '0'`
+	fi
+	if [ "$tstopf2" == "" ]
+	then
+		tstopf2=`grep 'TSTOPF' $tempFile2 | grep -E -o '0'`	
+	fi
+	tstopi=`grep 'TSTOPI' $tempFile | grep -E -o '[0-9]{5}'`
+	tstopi2=`grep 'TSTOPI' $tempFile2 | grep -E -o '[0-9]{5}'`
+	dateobs=`grep 'DATE-OBS' $tempFile | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4}`
+	dateobs2=`grep 'DATE-OBS' $tempFile2 | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4}`
+	dateend=`grep 'DATE-END' $tempFile | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4} `
+	dateend2=`grep 'DATE-END' $tempFile2 | grep -E -o 201[1-3]-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{4}` 
+	telescop=`grep 'TELESCOP' $tempFile`
+	telescop2=`grep 'TELESCOP' $tempFile2`
+	package=`grep 'PACKAGE' $tempFile`
+	package2=`grep 'PACKAGE' $tempFile2`
+	origin=`grep 'ORIGIN' $tempFile`
+	origin2=`grep 'ORIGIN' $tempFile2`
+	timeunit=`grep 'TIMEUNIT' $tempFile`
+	timeunit2=`grep 'TIMEUNIT' $tempFile2`
+	mjdref=`grep -E '## +MJDREF' $tempFile`
+	mjdref2=`grep -E '## +MJDREF' $tempFile2`
+	timesys=`grep 'TIMESYS' $tempFile`
+	timesys2=`grep 'TIMESYS' $tempFile2`
+	
+	echo $entry >> headerReportRaw_$month.txt
+	echo $entry
+	
+	if [ "$tstarti" != "$tstarti2" ]
+	then
+		echo "TSTARTI. new: "$tstarti" old: "$tstarti2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$tstartf" != "$tstartf2" ]
+	then
+		echo "TSTARTF. new: "$tstartf" old: "$tstartf2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$tstopi" != "$tstopi2" ]
+	then
+		echo "TSTOPI. new: "$tstopi" old: "$tstopi2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$tstopf" != "$tstopf2" ]
+	then
+		echo "TSTOPF. new: "$tstopf" old: "$tstopf2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$dateobs" != "$dateobs2" ]
+	then
+		echo "DATEOBS. new: "$dateobs" old: "$dateobs2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$dateend" != "$dateend2" ]
+	then
+		echo "DATEEND. new: "$dateend" old: "$dateend2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$telescop" != "$telescop2" ]
+	then
+		echo "TELESCOP. new: "$telescop" old: "$telescop2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$package" != "$package2" ]
+	then
+		echo "PACKAGE. new: "$package" old: "$package2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$origin" != "$origin2" ]
+	then
+		echo "ORIGIN. new: "$origin" old: "$origin2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$timeunit" != "$timeunit2" ]
+	then
+		echo "TIMEUNIT. new: "$timeunit" old: "$timeunit2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$mjdref" != "$mjdref2" ]
+	then
+		echo "MJDREF. new: "$mjdref" old: "$mjdref2 >> headerReportRaw_$month.txt
+	fi
+	if [ "$timesys" != "$timesys2" ]
+	then
+		echo "TIMESYS. new: "$timesys" old: "$timesys2 >> headerReportRaw_$month.txt
+	fi
+done
+
Index: branches/trigger_burst_research/Archive/checkRawFileIntegrityAndContent.sh
===================================================================
--- branches/trigger_burst_research/Archive/checkRawFileIntegrityAndContent.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/checkRawFileIntegrityAndContent.sh	(revision 18288)
@@ -0,0 +1,100 @@
+#!/bin/bash
+
+sourceFolder=$1
+archiveFolder=$2
+suffix=$3
+
+#are the arguments subfolders of /data00/fact-construction and /archive/fact ?
+sourceOk=`echo $sourceFolder | sed "s/data00\/fact-construction\/raw/OK/"`
+archiveOk=`echo $archiveFolder | sed "s/archive\/fact\/rev_1\/raw/OK/"`
+
+sourceOk=`echo $sourceOk | grep OK`
+archiveOk=`echo $archiveOk | grep OK`
+
+if [ "$sourceOk" == "" ]
+then
+	echo "source folder not good."
+	exit
+fi
+
+if [ "$archiveOk" == "" ]
+then
+	echo "archive folder not good"
+	exit
+fi
+
+uncompressedFolder=`echo $sourceFolder | sed "s/data00/data03/"`
+
+#do the listing of the files to be checked
+entries=`find $sourceFolder -type f -name '*.fits.gz' | sort`
+
+for entry in ${entries[@]}
+do
+	echo "$entry" >> GGfileSequence$suffix.txt
+	
+	uncompressedEntry=`echo $entry | sed "s/data00/data03/"`
+	uncompressedEntry=`echo $uncompressedEntry | sed "s/.gz//"`
+	archiveEntry=`echo $entry | sed "s/data00\/fact-construction/archive\/fact\/rev_1/"`
+	corruptedFile="0"
+	if [ -f $uncompressedEntry ]
+	then
+		result=`fverify $uncompressedEntry 2>/dev/null | grep '0 error(s)'`
+		if [ "$result" == "" ]
+		then
+			if [ -f $archiveEntry ]
+			then
+				echo "$entry" >> GGcorruptFiles$suffix.txt
+			fi
+			corruptedFile="1"
+		fi
+	else
+		echo "$entry" >> GGmissingUncompressed$suffix.txt
+		echo "$entry is missing uncompressed"
+		continue
+	fi
+	
+	if [ -f $archiveEntry ]
+	then
+		result=`/home_nfs/isdc/lyard/FACT++/fitsCompare $entry $archiveEntry`
+		if [ "$result" == "0" ]
+		then
+			echo "$entry" >> GGidenticalFiles$suffix.txt
+			echo "$entry is fine"
+			continue
+		fi
+		if [ "$result" == "1" ]
+		then
+			echo "$entry" >> GGdifferentFiles$suffix.txt
+			echo "$entry differs from its archived version"
+			continue
+		fi
+		if [ "$result" == "2" ]
+		then
+			echo "$entry" >> GGunexpectedErrors$suffix.txt
+			echo "$entry encountered an unexpected error"
+			continue
+		fi
+		if [ "$result" == "3" ]
+		then
+			echo "$entry" >> GGfineWithMoreRows$suffix.txt
+			echo "$entry is fine (with more rows!)"
+			continue
+		fi
+		if [ "$corruptedFile" == "1" ]
+		then
+			echo "$entry is corrupted and produced unknown error $result"
+		else
+			echo "$entry produced unknown error $result"
+		fi	
+		echo "$entry $result" >> GGunknownErrors$suffix.txt
+	else
+		if [ "$corruptedFile" == "1" ]
+		then
+			echo "$entry" >> GGreallymessedup$suffix.txt
+			echo "$entry is really messed up"
+		else
+			echo "$entry" >> GGmissingArchive$suffix.txt
+			echo "$entry is missing archive"
+		fi
+	fi
+done
Index: branches/trigger_burst_research/Archive/correctFileName.cpp
===================================================================
--- branches/trigger_burst_research/Archive/correctFileName.cpp	(revision 18288)
+++ branches/trigger_burst_research/Archive/correctFileName.cpp	(revision 18288)
@@ -0,0 +1,111 @@
+#include <string>
+#include <iostream>
+
+using namespace std;
+
+bool isNumber(char c)
+{
+	switch (c) {
+	case '0':
+	case '1':
+	case '2':
+	case '3':
+	case '4':
+	case '5':
+	case '6':
+	case '7':
+	case '8':
+	case '9':
+		return true;
+		break;
+	default:
+		return false;
+	};
+}
+
+int  main(int argc, char** argv)
+{
+	if (argc != 2)
+		return 0;
+	
+	string c(argv[1]);
+	
+	//get the year, month and day from the path.
+	string year, month, day;
+	string runnumber;
+	string serviceName;
+	int count = c.size()-5;
+	
+	while (count >= 0)
+	{
+		if (c[count+0] == '2' &&
+		    c[count+1] == '0' &&
+		    c[count+2] == '1' &&
+		    c[count+3] == '1' &&
+		    c[count+4] == '/')
+		    {
+		    	year = "2011";
+			month = c.substr(count+5, 2);
+			day = c.substr(count+8, 2);
+			break;
+		    }
+		 count--;
+	}
+	
+	//separate the runnumber from the rest of the filename
+	count = c.size()-6;
+	while (count >= 0)
+	{
+		if (c[count] == '_')
+		{
+			if (isNumber(c[count+1]) &&
+			    isNumber(c[count+2]) &&
+			    isNumber(c[count+3]))
+			    {
+			    	runnumber=c.substr(count+1, 3);
+				break;
+			    }
+		}
+		else
+		{
+			if (c[count+0] != '0' && 
+			    c[count+1] == '0' &&
+			    c[count+2] == '0' &&
+			    c[count+3] == '0' &&
+			    c[count+4] == '0' &&
+			    c[count+5] == '0')
+			    {
+			    	runnumber=c.substr(count+6, 3);
+				break;
+			    }
+		}
+		count--;
+	}	
+	
+	//figure out the service name
+	count = c.size()-3;
+	while (count > 0)
+	{
+		if ((c[count] == '_' || c[count] == '.') &&
+		    !isNumber(c[count+1]) &&
+		    isNumber(c[count-1]))
+		{
+			serviceName = c.substr(count+1, c.size()-(count+6));
+		}
+		count--;
+	}
+	if (serviceName == "fits")
+		serviceName = "";
+	
+	
+	cout << year << month << day;
+	if (runnumber != "") cout << "_";
+	cout << runnumber;
+	if (serviceName != "") cout << ".";
+	cout << serviceName << ".fits";
+//	cout << "year: " << year << " month: " << month << " day: " << day << endl;
+//	cout << "run number: " << runnumber << " service name: " << serviceName << endl;
+
+return 0;
+
+}
Index: branches/trigger_burst_research/Archive/correctFileName.cxx
===================================================================
--- branches/trigger_burst_research/Archive/correctFileName.cxx	(revision 18288)
+++ branches/trigger_burst_research/Archive/correctFileName.cxx	(revision 18288)
@@ -0,0 +1,111 @@
+#include <string>
+#include <iostream>
+
+using namespace std;
+
+bool isNumber(char c)
+{
+	switch (c) {
+	case '0':
+	case '1':
+	case '2':
+	case '3':
+	case '4':
+	case '5':
+	case '6':
+	case '7':
+	case '8':
+	case '9':
+		return true;
+		break;
+	default:
+		return false;
+	};
+}
+
+int  main(int argc, char** argv)
+{
+	if (argc != 2)
+		return 0;
+	
+	string c(argv[1]);
+	
+	//get the year, month and day from the path.
+	string year, month, day;
+	string runnumber;
+	string serviceName;
+	int count = c.size()-5;
+	
+	while (count >= 0)
+	{
+		if (c[count+0] == '2' &&
+		    c[count+1] == '0' &&
+		    c[count+2] == '1' &&
+		    c[count+3] == '1' &&
+		    c[count+4] == '/')
+		    {
+		    	year = "2011";
+			month = c.substr(count+5, 2);
+			day = c.substr(count+8, 2);
+			break;
+		    }
+		 count--;
+	}
+	
+	//separate the runnumber from the rest of the filename
+	count = c.size()-6;
+	while (count >= 0)
+	{
+		if (c[count] == '_')
+		{
+			if (isNumber(c[count+1]) &&
+			    isNumber(c[count+2]) &&
+			    isNumber(c[count+3]))
+			    {
+			    	runnumber=c.substr(count+1, 3);
+				break;
+			    }
+		}
+		else
+		{
+			if (c[count+0] != '0' && 
+			    c[count+1] == '0' &&
+			    c[count+2] == '0' &&
+			    c[count+3] == '0' &&
+			    c[count+4] == '0' &&
+			    c[count+5] == '0')
+			    {
+			    	runnumber=c.substr(count+6, 3);
+				break;
+			    }
+		}
+		count--;
+	}	
+	
+	//figure out the service name
+	count = c.size()-3;
+	while (count > 0)
+	{
+		if ((c[count] == '_' || c[count] == '.') &&
+		    !isNumber(c[count+1]) &&
+		    isNumber(c[count-1]))
+		{
+			serviceName = c.substr(count+1, c.size()-(count+6));
+		}
+		count--;
+	}
+	if (serviceName == "fits")
+		serviceName = "";
+	
+	
+	cout << year << month << day;
+	if (runnumber != "") cout << "_";
+	cout << runnumber;
+	if (serviceName != "") cout << ".";
+	cout << serviceName << ".fits";
+//	cout << "year: " << year << " month: " << month << " day: " << day << endl;
+//	cout << "run number: " << runnumber << " service name: " << serviceName << endl;
+
+return 0;
+
+}
Index: branches/trigger_burst_research/Archive/fitsCompare.cpp
===================================================================
--- branches/trigger_burst_research/Archive/fitsCompare.cpp	(revision 18288)
+++ branches/trigger_burst_research/Archive/fitsCompare.cpp	(revision 18288)
@@ -0,0 +1,214 @@
+//compile with:
+//g++44 -std=c++0x -o fitsCompare -DHAVE_ZLIB=1 fitsCompare.cpp -lz
+#include <string.h>
+#include <fstream>
+#include <map>
+#include <unordered_map>
+
+using namespace std;
+//#include <iostream>
+
+#include "externals/fits.h"
+
+using namespace std;
+
+ /*
+ *	Usage: program-name <file1> <file2> <optional -v for verbose output>
+ *
+ *	
+ *
+ *
+ *
+ *
+ */
+ 
+ fits* file1;
+ fits* file2;
+ char* file1Data;
+ char* file2Data;
+
+ int customReturn(int code)
+ {
+ 	if (file1)
+	{
+		file1->close();
+		delete file1;
+	}
+	if (file2)
+	{
+		file2->close();
+		delete file2;
+	}
+	if (file1Data)
+	{
+		delete[] file1Data;
+	}
+	if (file2Data)
+	{
+		delete[] file2Data;
+	}
+	exit(code);
+ }
+ 
+int main(int argc, char** argv)
+{
+	if (argc < 3)
+		return -1;
+		
+	bool verbose=false;
+	
+	if (argc > 3 &&
+	    !strcmp(argv[3], "-v"))
+	    verbose=true;
+	    
+	string filename1(argv[1]);
+	string filename2(argv[2]);
+	file1=NULL;
+	file2=NULL;
+	file1Data=NULL;
+	file2Data=NULL;
+	try
+	{
+		file1 = new fits(filename1);
+		file2 = new fits(filename2);
+	}
+	catch (const std::runtime_error& e)
+	{
+		if (verbose)
+			cout << "Could not open at least one of the two files." << endl;
+		else
+			cout << 2 << endl;
+		return -1;
+	}	
+	
+	//get the columns in the file. 
+	fits::Table::Columns columns1=file1->GetColumns();
+	fits::Table::Columns columns2=file2->GetColumns();
+	
+	if (columns1.size() != columns2.size())
+	{
+		if (verbose)
+			cout << "Different number of columns" << endl;
+		else
+			cout << "1" << endl;
+		customReturn(-1);
+	}
+	
+	long totalSize=0;
+	for (auto it=columns1.begin(), jt=columns2.begin(); it != columns1.end(); it++, jt++)
+	{
+		if (it->first != jt->first)
+		{
+			if (verbose)
+				cout << "Different column names" << endl;
+			else
+				cout << "1" << endl;
+			customReturn(-1);
+		}
+		if ((it->second.offset != jt->second.offset) ||
+		    (it->second.num != jt->second.num) ||
+		    (it->second.size != jt->second.size) ||
+		    (it->second.type != jt->second.type) ||
+		    (it->second.unit != jt->second.unit))
+		{
+			if (verbose)
+				cout << "Different column def" << endl;
+			else
+				cout << "1" << endl;
+			customReturn(-1);
+		}
+		
+		totalSize += it->second.size * it->second.num;
+
+	}
+	
+	char* file1Data = new char[totalSize];
+	char* file2Data = new char[totalSize];
+	
+	if ((sizeof(long) != 8) ||
+	    (sizeof(int) != 4) ||
+	    (sizeof(short) != 2))
+	    {
+	    	if (verbose)
+		    	cout << "OS IS NOT SUITABLE (32bits ?) please use a 64 bits system" << endl;
+		else
+			cout << "2" << endl;
+		customReturn(-1);
+	    }
+	
+	for (auto it=columns1.begin(); it != columns1.end(); it++)
+	{
+		switch (it->second.size) {
+		case 1:
+			file1->SetPtrAddress(it->first, &file1Data[it->second.offset], it->second.num);
+			file2->SetPtrAddress(it->first, &file2Data[it->second.offset], it->second.num);
+			break;
+		case 2:
+			file1->SetPtrAddress(it->first, (short*)(&file1Data[it->second.offset]), it->second.num);
+			file2->SetPtrAddress(it->first, (short*)(&file2Data[it->second.offset]), it->second.num);
+			break;
+		case 4:
+			file1->SetPtrAddress(it->first, (int*)(&file1Data[it->second.offset]), it->second.num);
+			file2->SetPtrAddress(it->first, (int*)(&file2Data[it->second.offset]), it->second.num);
+			break;
+		case 8:
+			file1->SetPtrAddress(it->first, (long*)(&file1Data[it->second.offset]), it->second.num);
+			file2->SetPtrAddress(it->first, (long*)(&file2Data[it->second.offset]), it->second.num);
+			break;
+		default:
+			if  (verbose)
+				cout << "Unknown column element size: " << it->second.size << endl;
+			else
+				cout << "2" << endl;
+			customReturn(-1);
+		};
+	}
+	
+	int numRows1 = file1->GetInt("NAXIS2");
+	int numRows2 = file2->GetInt("NAXIS2");
+	if (numRows1 > numRows2)
+	{
+		if (verbose)
+			cout << "looks like the files has different number of rows: " << numRows1 << " vs " << numRows2 << endl;
+		else
+			cout << "1" << endl;
+		customReturn(0);
+	}
+	int row=0;
+	if (verbose)
+		cout << "files have " << numRows1 << " rows" << endl << endl;
+	while (file1->GetNextRow() &&
+		file2->GetNextRow() &&
+		row < numRows1) 
+	{
+		if (verbose)
+		{
+			cout << "\rrow: " << row;
+			cout.flush();
+		}
+		for (int i=0;i<totalSize;i++)
+		{
+			if (file1Data[i] != file2Data[i])
+			{
+				if (verbose)
+					cout << "Files differ... i: " << i << " " << file1Data[i] << " " << file2Data[i] << endl;
+				else
+					cout << "1" << endl;
+				customReturn(0);
+			}
+		}
+		row++;
+	}
+	if (numRows1 != numRows2)
+	{
+		if (verbose)
+			cout << "Archive has more rows. orig. data is fine" << endl;
+		else
+			cout << "3" << endl;
+	}
+	if (verbose)
+		cout << "Files data is identical" << endl;
+	else
+		cout << "0" << endl;
+	customReturn(0);
+}
Index: branches/trigger_burst_research/Archive/fixAuxKeyWords.sh
===================================================================
--- branches/trigger_burst_research/Archive/fixAuxKeyWords.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/fixAuxKeyWords.sh	(revision 18288)
@@ -0,0 +1,147 @@
+#!/bin/bash
+
+if [ "$#" != 3 ]
+then
+	echo "Error: fixAuxKeyWords.sh should be called with 3 arguments. Aborting."
+	exit
+fi
+
+file=$1
+tempFile="temp.txt"
+reportFile=$2
+processErrors=$3
+
+if [ -a $tempFile ]
+then
+	rm $tempFile 2>/dev/null
+fi
+
+#get current keywords value
+result=`/home/isdc/lyard/FACT++/fitsdump $file -h -o $tempFile 2>/dev/null`
+
+if [ -a $tempFile ]
+then
+	timesys=`grep 'TIMESYS' $tempFile | grep -E -o 'UTC'`
+	mjdref=`grep 'MJDREF' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstarti=`grep 'TSTARTI' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstartf=`grep 'TSTARTF' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstopi=`grep 'TSTOPI' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstopf=`grep 'TSTOPF' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	date_obs=`grep 'DATE-OBS' $tempFile`
+	date_end=`grep 'DATE-END' $tempFile`
+	telescope=`grep 'TELESCOP' $tempFile`
+	package=`grep 'PACKAGE' $tempFile`
+	origin=`grep 'ORIGIN' $tempFile`
+	timeunit=`grep 'TIMEUNIT' $tempFile`
+
+else
+	echo "Could not list keywords in $file" >> $processErrors
+	exit
+fi
+
+rm $tempFile
+
+#retrieve the start and stop time from the data itself
+result=`/home/isdc/lyard/FACT++/fitsdump $file -c Time --minmax --nozero -o $tempFile 2>/dev/null`
+
+if [ -a $tempFile ]
+then
+
+	tstart=`grep 'min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstop=`grep 'max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstarti2=`echo $tstart | grep -E -o '[-]*[0-9]*[.]' | grep -E -o '[-]*[0-9]*'`
+	tstartf2=`echo $tstart | grep -E -o '[.][0-9]+'`
+	if [ "$tstarti2" == "" ] #no decimal part
+	then
+		tstarti2=$tstart
+	fi
+	tstartf2="0"$tstartf2
+	tstopi2=`echo $tstop | grep -E -o '[-]*[0-9]*[.]' | grep -E -o '[-]*[0-9]*'`
+	tstopf2=`echo $tstop | grep -E -o '[.][0-9]+'`
+	if [ "$tstopi2" == "" ] #no decimal part
+	then
+		tstopi2=$tstop
+	fi
+	tstopf2="0"$tstopf2
+
+else
+#	echo "Could not minmax $file" >> $processErrors
+	exit
+fi
+
+rm $tempFile
+#output the values to be added/updated to temp text files
+modified="false"
+if [ "$telescope" == "" ]
+then
+	echo "TELESCOP FACT / Telescope that acquired this data" >> $tempFile
+	modified="true"
+	echo "TELESCOP in $file" >> $2
+fi
+if [ "$package" == "" ]
+then
+	echo "PACKAGE FACT++ / Package name" >> $tempFile
+	modified="true"
+	echo "PACKAGE in $file" >> $2
+fi
+if [ "$origin" == "" ]
+then
+	echo "ORIGIN FACT / Institution that wrote the file" >> $tempFile
+	modified="true"
+	echo "ORIGIN in $file" >> $2
+fi
+if [ "$timeunit" == "" ]
+then
+	echo "TIMEUNIT d / Time given in days w.r.t. to MJDREF" >> $tempFile
+	modified="true"
+	echo "TIMEUNIT in $file" >> $2
+fi
+if [ "$mjdref" != "40587" ]
+then
+	mjdref="40587"
+	echo "MJDREF "$mjdref" / Store times in UNIX time (sec from 1970ish)" >> $tempFile
+	modified="true"
+	echo "MJDREF in $file" >> $2
+fi
+if [ "$timesys" != "UTC" ]
+then
+	timesys="UTC"
+	echo "TIMESYS "$timesys" / Time system" >> $tempFile
+	modified="true"
+	echo "TIMESYS in $file" >> $2
+fi
+if [ "$tstarti2" != "$tstarti" ] || [ "$tstartf2" != "$tstartf" ]
+then
+	echo "TSTARTI "$tstarti2" / Time when first event received (integral part)" >> $tempFile
+	echo "TSTARTF "$tstartf2" / Time when first event received (fractional part)" >> $tempFile
+	date_obs2=`echo "$tstarti2 + $tstartf2 + $mjdref" | bc -l`
+	date_obs2=`/home/isdc/lyard/FACT++/MjDtoISO $date_obs2`
+	echo "DATE-OBS "$date_obs2" / Time when first event was received" >> $tempFile
+	modified="true"
+	echo "TSTART in $file" >> $2
+fi
+
+if [ "$tstopi2" != "$tstopi" ] || [ "$tstopf2" != "$tstopf" ]
+then
+	echo "TSTOPI "$tstopi2" / Time when last event received (integral part)" >> $tempFile
+	echo "TSTOPF "$tstopf2" / Time when last event received (fractional part)" >> $tempFile
+	date_end2=`echo "$tstopi2 + $tstopf2 + $mjdref" | bc -l`
+	date_end2=`/home/isdc/lyard/FACT++/MjDtoISO $date_end2`
+	echo "DATE-END "$date_end2" / Time when last event was received" >> $tempFile
+	modified="true"
+	echo "TSTOP  in $file" >> $2
+fi
+
+if [ "$modified" == "true" ]
+then
+	echo "INGEST v0.1 Version of Etienne ingest script" >> $tempFile
+fi
+
+if [ -a $tempFile ]
+then
+	fmodhead $file $tempFile 2>&1 1>/dev/null
+fi
+
+fchecksum $file update+ 2>&1 1>/dev/null
+
+rm $tempFile 2>/dev/null
Index: branches/trigger_burst_research/Archive/fixRawKeyWords.sh
===================================================================
--- branches/trigger_burst_research/Archive/fixRawKeyWords.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/fixRawKeyWords.sh	(revision 18288)
@@ -0,0 +1,181 @@
+#!/bin/bash
+
+if [ "$#" != 3 ]
+then
+	echo "Error: fixRawKeyWords.sh should be called with 3 arguments. Aborting."
+	exit
+fi
+
+file=$1
+tempFile="/scratch/tempRaw.txt"
+reportFile=$2
+processErrors=$3
+
+if [ -a $tempFile ]
+then
+	rm $tempFile 2>/dev/null
+fi
+
+#get current keywords value
+result=`/home_nfs/isdc/lyard/FACT++/fitsdump $file -h -o $tempFile 2>/dev/null`
+
+if [ -a $tempFile ]
+then
+	timesys=`grep 'TIMESYS' $tempFile | grep -E -o 'UTC'`
+	mjdref=`grep 'MJDREF' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstarti=`grep 'TSTARTI' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstartf=`grep 'TSTARTF' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstopi=`grep 'TSTOPI' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstopf=`grep 'TSTOPF' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	date_obs=`grep 'DATE-OBS' $tempFile`
+	date_end=`grep 'DATE-END' $tempFile`
+	telescope=`grep 'TELESCOP' $tempFile`
+	package=`grep 'PACKAGE' $tempFile`
+	origin=`grep 'ORIGIN' $tempFile`
+	timeunit=`grep 'TIMEUNIT' $tempFile`
+else
+	echo "Could not list keywords in $file" >> $processErrors
+	exit
+fi
+
+rm $tempFile
+
+#retrieve the start and stop time from the data itself
+result=`/home_nfs/isdc/lyard/FACT++/fitsdump $file -c UnixTimeUTC --minmax --nozero -o $tempFile 2>/dev/null`
+
+if [ -a $tempFile ]
+then
+	tstart=`grep 'min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstop=`grep 'max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+	tstarti2=`echo $tstart | grep -E -o '[-]*[0-9]*[.]' | grep -E -o '[-]*[0-9]*'`
+	tstartf2=`echo $tstart | grep -E -o '[.][0-9]+'`
+	tstartf2="0"$tstartf2
+	tstopi2=`echo $tstop | grep -E -o '[-]*[0-9]*[.]' | grep -E -o '[-]*[0-9]*'`
+	tstopf2=`echo $tstop | grep -E -o '[.][0-9]+'`
+	tstopf2="0"$tstopf2
+else
+	#let's give it a shot with PCTime
+	result=`/home_nfs/isdc/lyard/FACT++/fitsdump $file -c PCTime --minmax --nozero -o $tempFile 2>/dev/null`
+	if [ -a $tempFile ]
+	then
+		tstart=`grep 'min' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+		tstop=`grep 'max' $tempFile | grep -E -o '[-]*[0-9]*[.]*[0-9]+'`
+		tstarti2=`echo $tstart | grep -E -o '[-]*[0-9]*[.]' | grep -E -o '[-]*[0-9]*'`
+		tstartf2=`echo $tstart | grep -E -o '[.][0-9]+'`
+		if [ "$tstarti2" == "" ] #no decimal part
+		then
+			tstarti2=$tstart
+		fi
+		tstartf2="0"$tstartf2
+		tstopi2=`echo $tstop | grep -E -o '[-]*[0-9]*[.]' | grep -E -o '[-]*[0-9]*'`
+		tstopf2=`echo $tstop | grep -E -o '[.][0-9]+'`
+		if [ "$tstopi2" == "" ] #no decimal part
+		then
+			tstopi2=$tstop
+		fi
+		tstopf2="0"$tstopf2
+	else
+#	echo "Could not minmax $file" >> $processErrors
+		exit
+	fi
+fi
+
+rm $tempFile
+#output the values to be added/updated to temp text files
+modified="false"
+if [ "$telescope" == "" ]
+then
+	echo "TELESCOP FACT / Telescope that acquired this data" >> $tempFile
+	modified="true"
+	echo "TELESCOP in $file" >> $2
+fi
+if [ "$package" == "" ]
+then
+	echo "PACKAGE FACT++ / Package name" >> $tempFile
+	modified="true"
+	echo "PACKAGE in $file" >> $2
+fi
+if [ "$origin" == "" ]
+then
+	echo "ORIGIN FACT / Institution that wrote the file" >> $tempFile
+	modified="true"
+	echo "ORIGIN in $file" >> $2
+fi
+if [ "$timeunit" == "" ]
+then
+	echo "TIMEUNIT d / Time given in days w.r.t. to MJDREF" >> $tempFile
+	modified="true"
+	echo "TIMEUNIT in $file" >> $2
+fi
+if [ "$mjdref" == "" ]
+then
+#	mjdref="0"
+	mjdref="40587"
+	echo "MJDREF "$mjdref" / Store times in UNIX time (sec from 1970ish)" >> $tempFile
+	modified="true"
+	echo "MJDREF in $file" >> $2
+fi
+if [ "$timesys" != "UTC" ]
+then
+	timesys="UTC"
+	echo "TIMESYS "$timesys" / Time system" >> $tempFile
+	modified="true"
+	echo "TIMESYS in $file" >> $2
+fi
+#adapt the start and stop to the mjdref (it is raw unix time in the data)
+if [ "$tstarti2" != "0" ] && [ "$tstarti2" != "" ]
+then
+	tstarti2=`echo "$tstarti2 - $mjdref" | bc -l`
+	tstopi2=`echo "$tstopi2 - $mjdref" | bc -l`
+else
+	tstarti2=0
+	tstopi2=0
+fi
+
+#give latitude for 10-6 precision in tstart and tstop
+tfcompare=`echo $tstartf | grep -E -o '0\.[0-9]{6}'`
+tfcompare2=`echo $tstartf2 | grep -E -o '0\.[0-9]{6}'`
+
+if [ "$tstarti2" != "$tstarti" ] || [ "$tfcompare" != "$tfcompare2" ] 
+then
+	echo "TSTARTI "$tstarti2" / Time when first event received (integral part)" >> $tempFile
+	echo "TSTARTF "$tstartf2" / Time when first event received (fractional part)" >> $tempFile
+	date_obs2=`echo "$tstarti2 + $tstartf2 + $mjdref" | bc -l`
+	date_obs2=`/home_nfs/isdc/lyard/FACT++/MjDtoISO $date_obs2`
+	echo "DATE-OBS "$date_obs2" / Time when first event was received" >> $tempFile
+	modified="true"
+	echo "TSTART in $file" >> $2
+fi
+
+tfcompare=`echo $tstopf | grep -E -o '0\.[0-9]{6}'`
+tfcompare2=`echo $tstopf2 | grep -E -o '0\.[0-9]{6}'`
+
+if [ "$tstopi2" != "$tstopi" ] || [ "$tfcompare" != "$tfcompare2" ]
+then
+
+	echo "first: $tfcompare |||  second: $tfcompare2||"
+	echo "TSTOPI "$tstopi2" / Time when last event received (integral part)" >> $tempFile
+	echo "TSTOPF "$tstopf2" / Time when last event received (fractional part)" >> $tempFile
+	date_end2=`echo "$tstopi2 + $tstopf2 + $mjdref" | bc -l`
+	date_end2=`/home_nfs/isdc/lyard/FACT++/MjDtoISO $date_end2`
+	echo "DATE-END "$date_end2" / Time when last event was received" >> $tempFile
+	modified="true"
+	echo "TSTOP  in $file" >> $2
+fi
+
+if [ "$modified" == "true" ]
+then
+	echo "INGEST v0.2 Version of Etienne ingest script" >> $tempFile
+	echo $file" header has been modified" >> $reportFile
+else
+	echo $file" header has NOT been modified" >> $reportFile
+fi
+
+if [ -a $tempFile ]
+then
+	fmodhead $file $tempFile 2>&1 1>/dev/null
+fi
+
+fchecksum $file update+ 2>&1 1>/dev/null
+
+rm $tempFile 2>/dev/null
Index: branches/trigger_burst_research/Archive/ingest.sh
===================================================================
--- branches/trigger_burst_research/Archive/ingest.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/ingest.sh	(revision 18288)
@@ -0,0 +1,49 @@
+sourceAux="/data00/fact-construction/aux"
+destAux="/archive/fact/rev_1/aux"
+
+sourceRaw="/data03/fact-construction/raw"
+destRaw="/archive/fact/rev_1/raw"
+
+ok="true"
+
+if [ -d $sourceAux ]
+then
+	echo "Source aux: "$sourceAux
+else
+	echo "Source aux DOES NOT EXIST"
+	ok="false"
+fi
+
+if [ -d $destAux ]
+then 
+	echo "Dest aux:   "$destAux
+else
+	echo "Dest aux DOES NOT EXIST"
+	ok="false"
+fi
+
+if [ -d $sourceRaw ]
+then
+	echo "Source raw: "$sourceRaw
+else
+	echo "Source raw DOES NOT EXIST"
+	ok="false"
+fi
+
+if [ -d $destRaw ]
+then
+	echo "Dest raw:   "$destRaw
+else
+	echo "Dest raw DOES NOT EXIST"
+	ok="false"
+fi
+
+if [ "$ok" == "true" ]
+then
+	#echo "Here I should be starting both scripts"
+	auxIngest.sh $sourceAux $destAux
+	rawIngest.sh $sourceRaw $destRaw
+else
+	echo "Something went wrong with folders. Please check them"
+fi
+	
Index: branches/trigger_burst_research/Archive/rawIngest.sh
===================================================================
--- branches/trigger_burst_research/Archive/rawIngest.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/rawIngest.sh	(revision 18288)
@@ -0,0 +1,150 @@
+#!/bin/bash
+
+sourceFolder=$1 
+destFolder=$2 
+
+if [ "$#" != "3" ]
+then
+	echo "Please specify source and dest folders, and an identifier for the log files. Aborting"
+	exit
+fi
+
+if [ "$1" == "" ]
+then
+	echo "Source folder is empty. Aborting"
+	exit
+fi
+
+if [ "$2" == "" ]
+then
+	echo "Dest folder is empty. Aborting"
+	exit
+fi
+
+if [ "$3" == "" ]
+then 
+	echo "Identifier for log files empty. Aborting"
+	exit
+fi
+
+#first let's make sure that source and dest folders do exist, and that dest is writable
+if [ -d $1 ]
+then
+	sourceFolder=$1
+else
+	echo "Source folder "$1" does not exist (or cannnot be read.) Aborting"
+	exit
+fi
+
+if [ -d $2 ]
+then
+	if [ -d $2"/etiennetest" ]
+	then
+		echo "Test folder already exist. Aborting"
+		exit
+	fi
+	mkdir $2"/etiennetest" 2>/dev/null
+	if [ -d $2"/etiennetest" ]
+	then
+		rm -rf $2"/etiennetest"
+		destFolder=$2
+	else
+		echo "Dest folder is not writable. Aborting"
+		exit
+	fi
+else
+	echo "Dest folder does not exist. Aborting"
+	exit
+fi
+
+#files=`ls $destFolder`
+#if [ "$files" != "" ]
+#then
+#	echo "Dest folder is not empty. Aborting"
+#	exit
+#fi
+sourceFolder=${sourceFolder%/}
+destFolder=${destFolder%/}
+echo "Will start ingesting files from "$sourceFolder" to "$destFolder
+echo "Will start ingesting files from "$sourceFolder" to "$destFolder >> Rawreport$3.txt
+
+#list all the files in sourceFolder, and copy then with the same structure to destfolder
+
+entries=`find $sourceFolder -type f -name '*.fits' | sort`
+
+for entry in ${entries[@]}
+do
+       #second construct the destination path.
+	filenameonly=${entry##*/}
+	#first construct the correct file name
+	targetFileName=$filenameonly
+	pathonly=${entry%$filenameonly}
+	extrapathonly=${pathonly#$sourceFolder/}
+	targetFolder=$destFolder"/"$extrapathonly
+	if [ ! -d $targetFolder ]
+	then
+		mkdir -p $targetFolder
+	fi
+	
+	#check if the file already exist there
+	targetFile=$targetFolder"/"$targetFileName
+	interFile="/scratch/"$targetFileName
+	echo "$targetFile"
+#	echo "$interFile"
+	if [ -a $targetFile".gz" ]
+	then
+		echo "File $targetFile already exist. Skipping it" >> Rawreport$3.txt
+		continue
+	fi
+	cp $entry $interFile
+	#if not, do the copying, fixing and checking
+
+#	grouping=`/home/isdc/lyard/FACT++/fitsdump $targetFile -h 2>/dev/null | grep GROUPING`
+	
+#	grouping=`grep 'GROUPING' "temp.txt"`
+		
+#	if [ "$grouping" == "" ]
+#	then
+		
+	repairRawFile.sh $interFile RawENDerrors$3.txt RawMJDerror$3.txt Rawreport$3.txt RawprocessErrors$3.txt
+	
+	if [ -a $interFile ]
+	then
+		fixRawKeyWords.sh $interFile RawreportTwo$3.txt RawprocessErrors$3.txt
+		result=`fverify $interFile 2>/dev/null | grep '0 error(s)'`
+		if [ "$result" == "" ]
+		then
+			echo "$interFile" >> RawstillHasProblems$3.txt
+			rm $interFile
+		else
+			gzip -1 $interFile
+			cp $interFile".gz" $targetFile".gz"
+			rm $interFile".gz"
+		fi
+	fi
+done
+
+#set the correct permissions
+find $destFolder -type f -exec chmod 640 {} \;
+find $destFolder -type d -exec chmod 750 {} \;
+find $destFolder -exec chgrp fact {} \;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: branches/trigger_burst_research/Archive/repairAuxFile.sh
===================================================================
--- branches/trigger_burst_research/Archive/repairAuxFile.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/repairAuxFile.sh	(revision 18288)
@@ -0,0 +1,81 @@
+#!/bin/bash
+#sourceFolder="./backup"
+#destFolder="./fixed"
+
+#entries=`find $sourceFolder -type f -name '*.fits' | sort`
+
+#for entry in ${entries[@]}
+#do
+#	echo $entry" "$destFolder
+#	cp $entry $destFolder
+#done
+
+#entries=`find $destFolder -type f -name '*.fits' | sort`
+
+#for entry in ${entries[@]}
+#do
+
+if [ "$#" != "5" ]
+then
+	echo "Error: repairAuxFile.sh should be called with 4 arguments please"
+	exit
+fi
+entry=$1
+errorFile=$2
+wrongMjdFile=$3
+reportFile=$4
+processErrorFile=$5
+	#verify file with fverify
+	result=`fverify $entry 2>/dev/null | grep '0 error(s)'`
+	if [ "$result" != "" ]
+	then
+		mjdref=`/home/isdc/lyard/FACT++/fitsdump $entry -h 2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]+'`
+		if [ "$mjdref" != 40587 ]
+		then
+			echo "$entry" >> $wrongMjdFile
+			result=""
+		fi
+	else
+		echo "$entry" >> $errorFile
+	fi
+	
+	if [ "$result" == "" ]
+	then
+	#fix it !
+#		echo $entry
+		headerSize=`ENDfixer $entry 2>/dev/null`
+		echo "Fixed END $entry" >> $reportFile
+		if [ "$headerSize" == "Error: header length not acceptable" ] || [ "$headerSize" == "Error: too much header space after END keyword" ] 
+		then 
+			echo "File "$entry" looks really messed up: "$headerSize >> $errorFile
+			exit
+		else 
+			filesize=`stat -c%s $entry`
+#			echo $headerSize" "$filesize
+			numrows=`/home/isdc/lyard/FACT++/fitsdump $entry -h 2>/dev/null | grep 'NAXIS2' | awk '{ print $4 }' | grep -E -o '[0-9]+'`
+			if [ "$numrows" == "" ]
+			then
+				echo "Cannot fitsdump $entry" >> $processErrorFile
+				rm $entry
+				exit
+			fi
+			rowWidth=`/home/isdc/lyard/FACT++/fitsdump $entry -h 2>/dev/null | grep 'NAXIS1' | awk '{ print $4 }' | grep -E -o '[0-9]+'`
+			
+			#is there any extra row that we can keep ?
+			#let RowCheck examine the time markers
+			numrows2=`RowChecker $entry $headerSize $rowWidth 0 $numrows 2>/dev/null`
+			if [ $numrows2 -gt $numrows ]
+			then
+				numrows=$numrows2
+				totSize=`echo " $headerSize + $numrows * $rowWidth " | bc -l`
+				fitsSize=`echo " 2880 - ($totSize % 2880) " | bc`
+				fitsSize=`echo " $totSize + $fitsSize " | bc -l`
+#				echo $headerSize" "$numrows" "$rowWidth" "$totSize" "$fitsSize
+#				echo " FileSize: "$filesize" should be "$fitsSize
+				truncate -s $totSize $entry 2>/dev/null
+				truncate -s $fitsSize $entry 2>/dev/null
+				echo "Resized   $entry" >> $reportFile
+			fi
+		fi
+	fi
+#done
Index: branches/trigger_burst_research/Archive/repairRawFile.sh
===================================================================
--- branches/trigger_burst_research/Archive/repairRawFile.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/repairRawFile.sh	(revision 18288)
@@ -0,0 +1,78 @@
+#!/bin/bash
+#sourceFolder="./backup"
+#destFolder="./fixed"
+
+#entries=`find $sourceFolder -type f -name '*.fits' | sort`
+
+#for entry in ${entries[@]}
+#do
+#	echo $entry" "$destFolder
+#	cp $entry $destFolder
+#done
+
+#entries=`find $destFolder -type f -name '*.fits' | sort`
+
+#for entry in ${entries[@]}
+#do
+
+if [ "$#" != "5" ]
+then
+	echo "Error: repairAuxFile.sh should be called with 4 arguments please"
+	exit
+fi
+entry=$1
+errorFile=$2
+wrongMjdFile=$3
+reportFile=$4
+processErrorFile=$5
+	#verify file with fverify
+	result=`fverify $entry 2>/dev/null | grep '0 error(s)'`
+	if [ "$result" != "" ]
+	then
+		mjdref=`/home_nfs/isdc/lyard/FACT++/fitsdump $entry -h 2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]+'`
+		if [ "$mjdref" != 40587 ]
+		then
+			echo "$entry" >> $wrongMjdFile
+			result=""
+		fi
+	else
+		echo "$entry" >> $errorFile
+	fi
+	
+	if [ "$result" == "" ]
+	then
+	#fix it !
+#		echo $entry
+		headerSize=`ENDfixer $entry 2>/dev/null`
+		echo "Fixed END $entry" >> $reportFile
+		if [ "$headerSize" == "Error: header length not acceptable" ] || [ "$headerSize" == "Error: too much header space after END keyword" ] 
+		then 
+			echo "File "$entry" looks really messed up: "$headerSize >> $errorFile
+			exit
+		else 
+			filesize=`stat -c%s $entry`
+#			echo $headerSize" "$filesize
+			numrows=`/home_nfs/isdc/lyard/FACT++/fitsdump $entry -h 2>/dev/null | grep 'NAXIS2' | awk '{ print $4 }' | grep -E -o '[0-9]+'`
+			if [ "$numrows" == "" ]
+			then
+				echo "Cannot fitsdump $entry" >> $processErrorFile
+				rm $entry
+				exit
+			fi
+			rowWidth=`/home_nfs/isdc/lyard/FACT++/fitsdump $entry -h 2>/dev/null | grep 'NAXIS1' | awk '{ print $4 }' | grep -E -o '[0-9]+'`
+
+			totSize=`echo " $headerSize + $numrows * $rowWidth " | bc -l`
+			fitsSize=`echo " 2880 - ($totSize % 2880) " | bc`
+			fitsSize=`echo " $totSize + $fitsSize " | bc -l`
+#				echo $headerSize" "$numrows" "$rowWidth" "$totSize" "$fitsSize
+#				echo " FileSize: "$filesize" should be "$fitsSize
+			if [ $filesize > $fitsSize ]
+			then
+				truncate -s $totSize $entry 2>/dev/null
+				truncate -s $fitsSize $entry 2>/dev/null
+				echo "Resized   $entry" >> $reportFile
+			fi
+			
+		fi
+	fi
+#done
Index: branches/trigger_burst_research/Archive/testNaming.sh
===================================================================
--- branches/trigger_burst_research/Archive/testNaming.sh	(revision 18288)
+++ branches/trigger_burst_research/Archive/testNaming.sh	(revision 18288)
@@ -0,0 +1,20 @@
+#!/bin/bash
+# testNaming.sh — print the base name of a path with any trailing .fits removed.
+filename=$1
+# strip the directory part (everything up to and including the last '/')
+filenameonly=${filename##*/}
+# strip the .fits suffix
+filenameonly=${filenameonly%%.fits}
+# e.g. ./2011/11/20111115_042.fits -> 20111115_042
+echo $filenameonly
+
+
+	#first construct the correct file name
+#	filenameonly=${entry##*/}
+#	year="2011"
+#	month=${filenameonly}
+       #second construct the destination path.
+#	filenameonly=${entry##*/}
+#	pathonly=${entry%$filenameonly}
+#	extrapathonly=${pathonly#$sourceFolder/}
+#	targetFolder=$destFolder"/"$extrapathonly
Index: branches/trigger_burst_research/Cron/crontab.coma
===================================================================
--- branches/trigger_burst_research/Cron/crontab.coma	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.coma	(revision 18288)
@@ -0,0 +1,14 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+# cronjobs for coma (Wuerzburg)
+# change permissions for files and directories in /fact
+##########################################################################
+# permissions: 
+#   directories: u:+rwx  g:+rx  o:+rx -> 755
+#   files:       u:+rw   g:+r   o:+r  -> 644
+##########################################################################
+0 8,18 * * * /usr/bin/find /fact/raw/ -type d -exec /bin/chmod 755 {} \;
+0 8,18 * * * /usr/bin/find /fact/raw/ -type f -exec /bin/chmod 644 {} \;
+0 8,18 * * * /usr/bin/find /fact/aux/ -type d -exec /bin/chmod 755 {} \;
+0 8,18 * * * /usr/bin/find /fact/aux/ -type f -exec /bin/chmod 644 {} \;
+# backup of QLA: to be moved to user fact
+#30 8,10,12 * * * /home/fact/FACT.processing/DataCheck/Transfer/BackupQLA.sh
Index: branches/trigger_burst_research/Cron/crontab.fact-transfer
===================================================================
--- branches/trigger_burst_research/Cron/crontab.fact-transfer	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.fact-transfer	(revision 18288)
@@ -0,0 +1,23 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.isdc"
+# cronjobs for user fact_opr (ISDC) on isdc-dl00 (machine for transfer)
+# transfer of aux data from LP to isdc-dl00
+0,30 * * * * if ! ps aux | grep RsyncAuxToISDC | grep -v grep >/dev/null 2>&1  ; then /bin/nice -n 19 /usr/bin/ionice -c 3 ~/DataCheck/Transfer/RsyncAuxToISDC.sh; fi
+# transfer of raw data from LP to isdc-dl00
+0,30 * * * * if ! ps aux | grep RsyncRawToISDC | grep -v grep >/dev/null 2>&1  ; then /bin/nice -n 19 /usr/bin/ionice -c 3 ~/DataCheck/Transfer/RsyncRawToISDC.sh; fi
+# do backup of auxiliary data to Wuerzburg
+15,45 * * * * if ! ps aux | grep BackupAuxToWue | grep -v grep >/dev/null 2>&1  ; then /bin/nice -n 19 /usr/bin/ionice -c 3 ~/DataCheck/Transfer/BackupAuxToWue.sh; fi
+# do backup of raw data to Wuerzburg
+15,45 * * * * /bin/nice -n 19 /usr/bin/ionice -c 3 ~/DataCheck/Transfer/BackupRawToWue.sh
+# check db and global log
+10 * * * * ~/DataCheck/Monitoring/CheckLogs.sh
+0 8,17 * * * ~/DataCheck/Monitoring/CheckStatus.sh
+# check which files can be deleted in La Palma
+0 16 * * * ~/DataCheck/Transfer/CheckTransfer.sh
+# check disk space
+50 * * * * ~/DataCheck/Monitoring/CheckDU.sh
+# rsync of the QLA results every morning
+0 8,10,12 * * * ~/DataCheck/Transfer/BackupQLA.sh
+# cron to study the system performance and test some things
+*/5 * * * *  source ~/.bashrc ; /bin/echo -n `/bin/date +\%F\ \%T`" time for ls: ">>~/time_for_ls_isdc-dl00_new.txt;/usr/bin/time -f \%e /bin/ls >/dev/null 2>>~/time_for_ls_isdc-dl00_new.txt
+*/5 * * * * /usr/sbin/lsof ~/DataCheck/Transfer/BackupRawToWue.sh > /dev/null
Index: branches/trigger_burst_research/Cron/crontab.fact-viewer
===================================================================
--- branches/trigger_burst_research/Cron/crontab.fact-viewer	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.fact-viewer	(revision 18288)
@@ -0,0 +1,34 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.isdc"
+SGE_ROOT="/usr/share/gridengine"
+SGE_CELL="isdc-ge-cell"
+# cronjobs for user fact_opr (ISDC) on isdc-viewer00, running now on isdc-in04 
+# check if raw and aux files needed for the analysis are available in the archive
+*/30 * * * * if ! ps aux | grep CheckRawFilesAvail | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/CheckRawFilesAvail.sh; fi
+*/30 * * * * if ! ps aux | grep CheckAuxFilesAvail | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/CheckAuxFilesAvail.sh; fi
+*/30 * * * * if ! ps aux | grep CheckDriveFileAvail | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/CheckDriveFileAvail.sh; fi
+*/30 * * * * if ! ps aux | grep CheckRatesFileAvail | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/CheckRatesFileAvail.sh; fi
+# build sequences and write sequence files at isdc
+0 */4 * * * if ! ps aux | grep BuildSequences | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/BuildSequences.sh; fi
+# jobmanager to process data
+*/15 * * * * if ! ps aux | grep JobManager | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/JobManager.sh; fi
+# fill auxiliary information to DB
+0 9,10,12,14,18 * * * if ! ps aux | grep FillAuxData | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillAuxData.sh ; fi
+6 9,10,12,14,18 * * * if ! ps aux | grep FillAuxCurrents | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillAuxCurrents.sh ; fi
+12 9,10,12,14,18 * * * if ! ps aux | grep FillAuxThresholds | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillAuxThresholds.sh ; fi
+18 9,10,12,14,18 * * * if ! ps aux | grep FillAuxTemp | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillAuxTemp.sh ; fi
+24 9,10,12,14,18 * * * if ! ps aux | grep FillEffectiveOn | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillEffectiveOn.sh ; fi
+30 9,10,12,14,18 * * * if ! ps aux | grep FillAuxCtrDev | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillAuxCtrDev.sh ; fi
+36 9,10,12,14,18 * * * if ! ps aux | grep FillDrsTemp | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillDrsTemp.sh ; fi
+42 9,10,12,14,18 * * * if ! ps aux | grep FillAuxContTemp | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillAuxContTemp.sh ; fi
+48 9,10,12,14,18 * * * if ! ps aux | grep FillAuxCamHum | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillAuxCamHum.sh ; fi
+0 11,15,18 * * * if ! ps aux | grep FillRatescans | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillRatescans.sh ; fi
+# fill file sizes to DB
+0 15 * * * if ! ps aux | grep FillFileSizes | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillFileSizes.sh ; fi
+# QLA: run-wise ganymed (running on data processed at ISDC)
+0 */2 * * * if ! ps aux | grep Step2a | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/QuickLook/Step2a.sh ; fi
+# fill results of QLA to DB
+30 */2 * * * if ! ps aux | grep FillNumEvts | grep -v grep >/dev/null 2>&1  ; then ~/DataCheck/Processing/FillNumEvts.sh ; fi
+# cron to study the system performance (temporarily used)
+#*/5 * * * * /bin/echo -n `/bin/date +\%F\ \%T`" time for ls: ">>~/time_for_ls_isdc-in04.txt;/usr/bin/time -f \%e /bin/ls >/dev/null 2>>~/time_for_ls_isdc-in04.txt
+*/5 * * * * source ~/.bashrc ; /bin/echo -n `/bin/date +\%F\ \%T`" time for ls: ">>~/time_for_ls_isdc-in04_new.txt;/usr/bin/time -f \%e /bin/ls >/dev/null 2>>~/time_for_ls_isdc-in04_new.txt
Index: branches/trigger_burst_research/Cron/crontab.fact01
===================================================================
--- branches/trigger_burst_research/Cron/crontab.fact01	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.fact01	(revision 18288)
@@ -0,0 +1,19 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.lp.gate"
+# cronjobs for fact01 alias gate
+# write files for David's MWL campaign webpage
+0 8 * * * cd /home/factwww/mwlinfo ; /users/fact/SW.automatic.processing/DataCheck/DataCheck/InfoForMWL.sh
+# check time offset of machine
+0 */6 * * * /users/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
+# backup databases 
+0 8 * * * /users/fact/SW.automatic.processing/DataCheck/Transfer/BackupDatabase.sh
+# copy weather station data to mysql db (caused problems)
+#*/5 * * * * /usr/bin/wview-mysql-export
+# overlay celestial objects to skycam images every 5 min during night
+# magic skycam
+*/5 18-23,0-8 * * *  /home/fact/allskysources/allSkySources.sh
+# fact skycam
+*/5 18-23,0-8 * * *  /home/fact/allskysources_cp/allSkySources.sh
+# check shift calendar
+SHELL=/bin/bash
+5 8 * * * for ((i=0;i<8;i++)) do date=`date -u +\%Y\%m\%d --date='+'$i'day'` ; /usr/bin/php -f /home/factwww/dch/shiftinfo.php date=$date > /dev/null; done
Index: branches/trigger_burst_research/Cron/crontab.fact02
===================================================================
--- branches/trigger_burst_research/Cron/crontab.fact02	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.fact02	(revision 18288)
@@ -0,0 +1,4 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+# cronjobs for fact02 alias aux
+# check time offset of machine
+0 */6 * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
Index: branches/trigger_burst_research/Cron/crontab.fact04
===================================================================
--- branches/trigger_burst_research/Cron/crontab.fact04	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.fact04	(revision 18288)
@@ -0,0 +1,4 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+# cronjobs for fact04 alias gui
+# check time offset of machine
+0 */6 * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
Index: branches/trigger_burst_research/Cron/crontab.fact1
===================================================================
--- branches/trigger_burst_research/Cron/crontab.fact1	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.fact1	(revision 18288)
@@ -0,0 +1,14 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.lp.data"
+# cronjobs for fact1 alias data
+# check time offset of machine
+0 */6 * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
+# compress data
+*/5 19-23,0-7 * * * if ! ps aux | grep ZipRawData | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Transfer/ZipRawData.sh ; fi
+42 8-18 * * * if ! ps aux | grep ZipRawData | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Transfer/ZipRawData.sh ; fi
+# fill moon information
+42 * * * * if ! ps aux | grep FillMoonInfo | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Processing/FillMoonInfo.sh ; fi
+# check disk usage for /loc_data
+50 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckDU.sh
+# check global log for WARN and ERROR
+10 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckLogs.sh
Index: branches/trigger_burst_research/Cron/crontab.fact2
===================================================================
--- branches/trigger_burst_research/Cron/crontab.fact2	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.fact2	(revision 18288)
@@ -0,0 +1,32 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.lp.data"
+# cronjobs for fact2 alias daq
+## check network status 
+#*/15 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNetworkStatus.sh
+# check time offset of machine
+0 */6 * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
+# check disk usage for /loc_data
+50 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckDU.sh
+# rsync auxfiles from newdaq via daq to data
+*/15 * * * * if ! ps aux | grep RsyncAuxLP | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Transfer/RsyncAuxLP.sh ; fi
+# first step of the QLA: rsync newdaq->daq, sequ writing, callisto, star
+*/5 19-23,0-7 * * * if ! ps aux | grep Step1 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step1.sh ; fi
+0 8-18 * * * if ! ps aux | grep Step1 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step1.sh ; fi
+# third step of the QLA: creating lightcurves for the webpage
+2-59/5 19-23,0-7 * * * if ! ps aux | grep Step3 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step3.sh run ; fi
+0 8-18 * * * if ! ps aux | grep Step3 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step3.sh run ; fi
+# create quality plots
+0 * * * * /home/fact/SW.automatic.processing/DataCheck/DataCheck/PlotQuality.sh
+# backup of the QLA results to newdaq
+0 8,10,12 * * * /home/fact/SW.automatic.processing/DataCheck/Transfer/BackupQLA.sh
+# get some images for later data check every 5 minutes during night
+# lidcam
+*/5 18-23,0-8 * * * mkdir -p /loc_data/analysis/lidcam_images/`date -u +\%Y/\%m/\%d` && /usr/bin/cvlc -q rtsp://10.0.100.87/live/h264 --rate=2 --video-filter=scene --vout=dummy --aout=dummy --start-time=1 --stop-time=2 --scene-format=jpg --scene-replace --scene-prefix=lidcamimage_`date -u +\%Y\%m\%d_\%H\%M\%S` --scene-path=/loc_data/analysis/lidcam_images/`date -u +\%Y/\%m/\%d` vlc://quit 2>/dev/null
+# fact skycam
+*/5 18-23,0-8 * * * mkdir -p /loc_data/analysis/skycam_images/`date -u +\%Y/\%m/\%d` && /usr/bin/cvlc -q rtsp://10.0.100.84/live/h264 --rate=2 --video-filter=scene --vout=dummy --aout=dummy --start-time=1 --stop-time=2 --scene-format=jpg --scene-replace --scene-prefix=skycamimage_`date -u +\%Y\%m\%d_\%H\%M\%S` --scene-path=/loc_data/analysis/skycam_images/`date -u +\%Y/\%m/\%d` vlc://quit 2>/dev/null
+# concam 
+*/5 18-23,0-8 * * *  mkdir -p /loc_data/analysis/concam_images/`date -u +\%Y/\%m/\%d` && wget -q -O /loc_data/analysis/concam_images/`date -u +\%Y/\%m/\%d`/concam_allskyimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg http://catserver.ing.iac.es/weather/archive/concam/tmp/concam.jpg
+# gtc allsky image 
+*/5 18-23,0-8 * * *  mkdir -p /loc_data/analysis/gtc_images/`date -u +\%Y/\%m/\%d` && wget -q -O /loc_data/analysis/gtc_images/`date -u +\%Y/\%m/\%d`/gtc_allskyimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg http://www.gtc.iac.es/multimedia/netcam/camaraAllSky.jpg
+# magic allsky image 
+*/5 18-23,0-8 * * *  mkdir -p /loc_data/analysis/magic_images/`date -u +\%Y/\%m/\%d` && wget -q -O /loc_data/analysis/magic_images/`date -u +\%Y/\%m/\%d`/magic_allskyimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg http://www.magic.iac.es/site/weather/AllSkyCurrentImage.JPG
Index: branches/trigger_burst_research/Cron/crontab.gate
===================================================================
--- branches/trigger_burst_research/Cron/crontab.gate	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.gate	(revision 18288)
@@ -0,0 +1,23 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.lp.gate"
+# cronjobs for fact01 alias gate
+# write files for David's MWL campaign webpage
+0 8 * * * cd /home/factwww/mwlinfo ; /users/fact/SW.automatic.processing/DataCheck/DataCheck/InfoForMWL.sh
+# check time offset of machine
+0 */6 * * * /users/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
+# backup databases 
+0 8 * * * /users/fact/SW.automatic.processing/DataCheck/Transfer/BackupDatabase.sh
+# copy weather station data to mysql db (caused problems)
+#*/5 * * * * /usr/bin/wview-mysql-export
+# overlay celestial objects to skycam images every 5 min during night
+# magic skycam
+*/5 18-23,0-8 * * *  /home/fact/allskysources/allSkySources.sh
+# fact skycam
+*/5 18-23,0-8 * * *  /home/fact/allskysources_cp/allSkySources.sh
+
+# this script fills the sqm data from the last 3 nights into the runinfo db
+0 12 * * * /home/fact/DataCheck/fill_sqm_data_into_db.py > /dev/null
+
+# check shift calendar
+SHELL=/bin/bash
+5 8 * * * for ((i=0;i<8;i++)) do date=`date -u +\%Y\%m\%d --date='+'$i'day'` ; /usr/bin/php -f /home/factwww/dch/shiftinfo.php date=$date > /dev/null; done
Index: branches/trigger_burst_research/Cron/crontab.newdaq
===================================================================
--- branches/trigger_burst_research/Cron/crontab.newdaq	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.newdaq	(revision 18288)
@@ -0,0 +1,15 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.lp.data"
+# cronjobs for newdaq
+# check time offset of machine
+0 */6 * * * /usr/bin/nice -n 19 /usr/bin/ionice -c 3 /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
+# check raw data and fill information to DB
+*/5 19-23,0-7 * * * if ! ps aux | grep CheckRawData | grep -v grep >/dev/null 2>&1  ; then /usr/bin/nice -n 19 /usr/bin/ionice -c 3 /home/fact/SW.automatic.processing/DataCheck/Processing/CheckRawData.sh ; fi
+21 8-18 * * * if ! ps aux | grep CheckRawData | grep -v grep >/dev/null 2>&1  ; then /usr/bin/nice -n 19 /usr/bin/ionice -c 3 /home/fact/SW.automatic.processing/DataCheck/Processing/CheckRawData.sh ; fi
+# fill auxiliary data
+1-59/5 19-23,0-7 * * * if ! ps aux | grep FillAuxData | grep -v grep >/dev/null 2>&1  ; then /usr/bin/nice -n 19 /usr/bin/ionice -c 3 /home/fact/SW.automatic.processing/DataCheck/Processing/FillAuxData.sh ; fi
+21 8-18 * * * if ! ps aux | grep FillAuxData | grep -v grep >/dev/null 2>&1  ; then /usr/bin/nice -n 19 /usr/bin/ionice -c 3 /home/fact/SW.automatic.processing/DataCheck/Processing/FillAuxData.sh ; fi
+# check disk usage for /loc_data
+50 * * * * /usr/bin/nice -n 19 /usr/bin/ionice -c 3 /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckDU.sh
+# check network status 
+*/15 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNetworkStatus.sh
Index: branches/trigger_burst_research/Cron/crontab.root
===================================================================
--- branches/trigger_burst_research/Cron/crontab.root	(revision 18288)
+++ branches/trigger_burst_research/Cron/crontab.root	(revision 18288)
@@ -0,0 +1,4 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+# cronjobs for root user in LP
+*/2 * * * * /usr/sbin/iotop -o -b -n 1 -d 5 > /loc_data/stat/iotop`/bin/date  +\%F-\%H-\%M-\%S`.txt
+*/2 * * * * /usr/bin/top -b -n 1 > /loc_data/stat/top`/bin/date  +\%F-\%H-\%M-\%S`.txt
Index: branches/trigger_burst_research/DataCheck/InfoForMWL.sh
===================================================================
--- branches/trigger_burst_research/DataCheck/InfoForMWL.sh	(revision 18288)
+++ branches/trigger_burst_research/DataCheck/InfoForMWL.sh	(revision 18288)
@@ -0,0 +1,105 @@
+#!/bin/bash
+#
+# script to read observation information from the database
+# format: as given by David
+# MJD DATE START STOP FLUX COMMENT
+# for times before midnight: eg 22:33 -> -01:27
+# Requires Sourcefile.sh helpers (printprocesslog, getdbsetup) and the DB
+# credentials $us/$pw/$ho/$db that getdbsetup provides.
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 $1"
+
+# getting db info for query
+getdbsetup
+# get_table: query RunInfo (fRunTypeKEY=1) for $sourcekey, nights $start..$stop, one row per night, written to $file
+function get_table()
+{
+   printprocesslog "INFO writing "$file
+   query="SELECT Datediff(Max(fRunStop), '1858-11-17') as MJD, "
+   query=$query" Date_format(Max(fRunStop), '%Y/%m/%d') as DATE, "
+   #query=$query" Date_format(Min(fRunStart), '%H:%i') as STARTOLD, "
+   query=$query" if(Hour(Min(fRunStart))>10, Concat('-', Time_format(Addtime(Timediff(Concat(STR_TO_DATE(fNight, '%Y%m%d'), ' 23:59:59'), Min(fRunStart)), '0 0:1:0.0'), '%H:%i')), Date_format(Min(fRunStart), '%H:%i')) as START, "
+   #query=$query" Date_format(Max(fRunStop), '%H:%i') as STOPOLD, "
+   query=$query" if(Hour(Max(fRunStop))>10, Concat('-', Time_format(Addtime(Timediff(Concat(STR_TO_DATE(fNight, '%Y%m%d'), ' 23:59:59'), Max(fRunStop)), '0 0:1:0.0'), '%H:%i')), Date_format(Max(fRunStop), '%H:%i')) as STOP, "
+   query=$query" CONCAT('---') as FLUX, "
+   #query=$query" CONCAT('#') as Comments"
+   #query=$query" CONCAT('# ', Timediff(Max(fRunStop), Min(fRunStart)), ' ontime: ', Convert(Sum(Time_to_sec(Timediff(fRunStop, fRunStart)))/60./60., decimal(2,1))) AS Comments"
+   query=$query" IF (ISNULL(Sum(fOnTime)), CONCAT('#'), CONCAT('# ontime: ', Convert(Sum(fOntime)/3600, decimal(2,1)), 'h')) AS Comments"
+   query=$query" FROM RunInfo "
+   query=$query" WHERE fSourceKEY="$sourcekey
+   query=$query" AND fRunTypeKEY=1 "
+   query=$query" AND fNight BETWEEN "$start" AND "$stop
+   query=$query" GROUP BY fNight "
+   query=$query" ORDER BY fNight "
+   #echo $query
+   #echo ""
+   printprocesslog "DEBUG sendquery QUERY: "$query
+   # sendquery can't be used as it writes everything into a variable
+   mysql -s -u $us --password=$pw --host=$ho $db -e " $query " > $file
+}
+
+##mrk501
+#sourcekey=2
+#file="Mrk501_FACT_observations_2014.txt"
+#start=20140101 
+#stop=20141030
+#get_table
+#exit
+
+#mrk421
+sourcekey=1
+
+file="Mrk421_FACT_observations_all.txt"
+start=20111115
+stop=20200101
+get_table
+
+file="Mrk421_FACT_observations_2011_2012.txt"
+start=20120101
+stop=20120701
+get_table
+
+file="Mrk421_FACT_observations_2012_2013.txt"
+start=20121001
+stop=20130930
+get_table
+
+file="Mrk421_FACT_observations_2013_2014.txt"
+start=20131001
+stop=20140930
+get_table
+
+file="Mrk421_FACT_observations_2014_2015.txt"
+start=20141001
+stop=20150930
+get_table
+
+
+#mrk501
+sourcekey=2
+
+file="Mrk501_FACT_observations_all.txt"
+start=20120501 #really started at 19.5.2012
+stop=20200101
+get_table
+
+file="Mrk501_FACT_observations_2012.txt"
+start=20120501 
+stop=20121030
+get_table
+
+file="Mrk501_FACT_observations_2013.txt"
+start=20130101 
+stop=20131030
+get_table
+
+file="Mrk501_FACT_observations_2014.txt"
+start=20140101 
+stop=20141030
+get_table
+
+file="Mrk501_FACT_observations_2015.txt"
+start=20150101 
+stop=20151030
+get_table
+
Index: branches/trigger_burst_research/DataCheck/PlotQuality.sh
===================================================================
--- branches/trigger_burst_research/DataCheck/PlotQuality.sh	(revision 18288)
+++ branches/trigger_burst_research/DataCheck/PlotQuality.sh	(revision 18288)
@@ -0,0 +1,53 @@
+#!/bin/bash
+#
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 $1"
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'\|'^[0-9][0-9][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   if [ "$1" = "" ]
+   then 
+      # get last nights if hour between 7 and 19h, else only current night (NOTE(review): comment said 6 nights but getdates is called with 3 — confirm)
+      getdates 3 7 19
+   else
+      getdates $1
+   fi
+fi
+
+# for testing only — hard-coded anapath (presumably overrides the default from Sourcefile.sh)
+anapath=/loc_data/analysis
+
+cd $mars
+
+for date in ${dates[@]}
+do 
+   #echo "processing "$date"..."
+   outpath=$anapath/quality/$date
+   makedir $outpath 
+   logfile=$anapath/quality/$date/quality.log
+   year=`echo $date | cut -d/ -f1`
+   month=`echo $date | cut -d/ -f2 | sed -e 's/^0//g'`
+   day=`echo $date | cut -d/ -f3 | sed -e 's/^0//g'`
+   printprocesslog "DEBUG root -q -b fact/plots/quality.C\("$year"\,"$month"\,"$day"\,\""$outpath"\"\) | tee $logfile | intgrep"
+   check1=`root -q -b fact/plots/quality.C\($year\,$month\,$day\,\""$outpath"\"\) 2>&1 | tee $logfile | intgrep`
+   
+   case $check1 in
+      0)   printprocesslog "INFO quality.C was successful for date "$date" (check1=$check1)"
+           ;;
+      *)   printprocesslog "ERROR quality.C failed for date "$date" (check1=$check1)"
+           ;;
+   esac
+done
+
+
+finish
+
Index: branches/trigger_burst_research/DataCheck/PlotRatescans.sh
===================================================================
--- branches/trigger_burst_research/DataCheck/PlotRatescans.sh	(revision 18288)
+++ branches/trigger_burst_research/DataCheck/PlotRatescans.sh	(revision 18288)
@@ -0,0 +1,50 @@
+#!/bin/bash
+#
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 $1"
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'\|'^[0-9][0-9][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   if [ "$1" = "" ]
+   then 
+      # get last nights if hour between 7 and 19h, else only current night (NOTE(review): comment said 6 nights but getdates is called with 3 — confirm)
+      getdates 3 7 19
+   else
+      getdates $1
+   fi
+fi
+
+# for testing only
+anapath=/loc_data/analysis
+
+cd $mars
+
+for date in ${dates[@]}
+do 
+   outpath=$anapath/ratescans/$date
+   makedir $outpath
+   date2=`echo $date | sed -e 's/\///g'`
+   logfile=$outpath/$date2.log
+   printprocesslog "INFO root -q -b fact/plots/plotratescan.C\("\"$date2\""\) 2>&1 | tee $logfile | intgrep"
+   # capture stderr as well so intgrep sees root's error output (consistent with PlotQuality.sh)
+   check1=`root -q -b fact/plots/plotratescan.C\("\"$date2\""\) 2>&1 | tee $logfile | intgrep`
+   
+   case $check1 in
+      0)   printprocesslog "INFO plotratescan.C was successful for date "$date2" (check1=$check1)"
+           ;;
+      *)   printprocesslog "ERROR plotratescan.C failed for date "$date2" (check1=$check1)"
+           ;;
+   esac
+done
+
+finish
+
Index: branches/trigger_burst_research/DataCheck/SchedulingPlots.sh
===================================================================
--- branches/trigger_burst_research/DataCheck/SchedulingPlots.sh	(revision 18288)
+++ branches/trigger_burst_research/DataCheck/SchedulingPlots.sh	(revision 18288)
@@ -0,0 +1,54 @@
+#!/bin/bash
+#
+# Script to copy scheduling plots to the web
+#
+
+# remarks:
+#
+# be aware that the script only writes files which don't exist yet
+# i.e. in case of a new sw version, old files have to be deleted or moved away
+#
+# script to be executed on gate, i.e. FACT++ there has to be up-to-date
+
+# create plots for past (-) / future (+) night
+directions=( "-" "+" )
+
+# number of days to be treated
+numdays=1000
+
+plot=/home/fact/FACT++/makeplots
+root=/opt/root_v5.34.10/bin/thisroot.sh
+
+# creating files for X days (if they don't exist yet)
+for (( i=0; i < $numdays ; i++))
+do 
+   for direction in ${directions[@]}
+   do
+      outpath=/home/factwww/scheduling/`date --date="${direction}${i}days" +%Y/%m/%d`
+      date=`date --date="${direction}${i}days" +%Y-%m-%d`
+      echo -n $date
+
+      if ! [ -d $outpath ]
+      then
+         mkdir -p $outpath
+      else
+         echo $outpath" exists -> continue"
+         continue 
+      fi
+      echo -n ": creating files..."
+      cd $outpath 
+      $plot $date --max-zd=75 --max-current=90 >/dev/null 2>&1
+      files=`ls *.eps`
+      for file in $files
+      do 
+         if ls $file | grep Legend >/dev/null
+         then
+            scale=0.8
+         else
+            scale=1.5
+         fi
+         pstoimg -quiet -scale $scale $file
+      done
+      echo ""
+   done
+done
Index: branches/trigger_burst_research/Monitoring/CheckDU.sh
===================================================================
--- branches/trigger_burst_research/Monitoring/CheckDU.sh	(revision 18288)
+++ branches/trigger_burst_research/Monitoring/CheckDU.sh	(revision 18288)
@@ -0,0 +1,69 @@
+#!/bin/bash
+#
+# Script to check whether disk is full
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# possible limits
+limits=( 209715200 524288000 1073741824 2147483648 )
+texts=(   "200 GB"  "500 GB"     "1 TB"     "2 TB" )
+
+# set standard limits
+lowlimit=1
+highlimit=2
+
+# get paths depending on host
+case $HOSTNAME in
+#        data) dirs=( "/loc_data" "/daq" "/newdaq" )
+        data) dirs=( "/loc_data" )
+              ;;
+         daq) dirs=( "/raid10" )
+              ;;
+      newdaq) dirs=( "/fact" )
+              ;;
+   isdc-dl00) dirs=( "/gpfs" "/scratch" )
+              highlimit=1
+              # better: array of limits
+              ;;
+           *) echo "no valid host "$HOSTNAME
+              exit
+              ;;
+esac
+
+# get current hour
+hour=`date +%k`
+
+# define disk space limit for check depending on the time
+if [ $hour -lt 8 ] || [ $hour -gt 15 ]
+then
+   # during night
+   dulimit=${limits[$lowlimit]}
+   dutext=${texts[$lowlimit]}
+else
+   # during day
+   dulimit=${limits[$highlimit]}
+   dutext=${texts[$highlimit]}
+fi
+
+for dir in ${dirs[@]}
+do 
+   # check if directory is mounted (check if empty) 
+   if [ "$(ls -A $dir)" ]
+   then 
+      # get available disk space
+      diskusage=( `df -P $dir | grep $dir ` )
+      # check if more than X GB are left on /loc_data
+      if [ ${diskusage[3]} -lt $dulimit ] 
+      then
+         echo "WARN less than "$dutext" left on "$dir" on node "$HOSTNAME" ("${diskusage[3]}")"
+         printprocesslog "DISK less than "$dutext" left on "$dir" on node "$HOSTNAME" ("${diskusage[3]}")"
+         df -h $dir
+         echo ""
+      fi
+   else
+      echo "ERROR "$dir" seems to be not mounted."
+   fi
+done
+
Index: branches/trigger_burst_research/Monitoring/CheckLogs.sh
===================================================================
--- branches/trigger_burst_research/Monitoring/CheckLogs.sh	(revision 18288)
+++ branches/trigger_burst_research/Monitoring/CheckLogs.sh	(revision 18288)
@@ -0,0 +1,39 @@
+#!/bin/bash
+#
+# This script checks, if there are WARNs or ERRORs in the processlog of 
+# the last night and sends them to the emailadresse(s) in $erradr
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+lasthour=`date +'%F %H' --date="-1hour"`
+
+processlogfile=$processlogpath/process`date +%F --date="-1hour"`.log
+
+if grep -E "^$lasthour.*WARN" $processlogfile >/dev/null
+then 
+   grep -E "^$lasthour.*WARN" $processlogfile | mail -s 'found warnings in '$processlogfile $erradrs
+   printprocesslog "INFO sent warnings to "$erradrs
+fi
+
+if grep -E "^$lasthour.*ERROR" $processlogfile >/dev/null
+then 
+   grep -E "^$lasthour.*ERROR" $processlogfile | mail -s 'found errors in '$processlogfile $erradrs
+   printprocesslog "INFO sent errors to "$erradrs
+fi
+
+if grep -E "^$lasthour.*CONNECTION" $processlogfile >/dev/null
+then 
+   grep -E "^$lasthour.*CONNECTION" $processlogfile | mail -s 'found connection problems ('$processlogfile')' $erradrs
+   printprocesslog "INFO sent connection-problem info to "$erradrs
+fi
+
+if grep -E "^$lasthour.*DISK" $processlogfile >/dev/null
+then 
+   grep -E "^$lasthour.*DISK" $processlogfile | mail -s 'disk full ('$processlogfile')' $deladrs
+   printprocesslog "INFO sent full-disk info to "$deladrs
+fi
+
+printprocesslog "INFO finished $0"
+
Index: branches/trigger_burst_research/Monitoring/CheckNTPDate.sh
===================================================================
--- branches/trigger_burst_research/Monitoring/CheckNTPDate.sh	(revision 18288)
+++ branches/trigger_burst_research/Monitoring/CheckNTPDate.sh	(revision 18288)
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# script to check whether the time of the computer is off
+# offset is inserted into the database
+# 
+
+offset=`/usr/sbin/ntpdate -d ptbtime1.ptb.de 2>/dev/null | grep '^offset*' | cut -d' ' -f2`
+query="INSERT NTPDate SET fOffset="$offset", fMachineKEY=(SELECT fMachineKEY FROM Machine WHERE fMachineName='"$HOSTNAME"'), fTime=Now()"
+# host needs full address to work also on fact01
+/usr/bin/mysql -u systemstatus --host=fact01.fact.local systemstatus -e "$query"
+
+if [ $(echo " $offset > 1  ||  $offset < -1 " | bc) -eq 1 ]
+then
+   echo `date`": time difference of "$offset" sec on "$HOSTNAME
+fi
+
Index: branches/trigger_burst_research/Monitoring/CheckNetworkStatus.sh
===================================================================
--- branches/trigger_burst_research/Monitoring/CheckNetworkStatus.sh	(revision 18288)
+++ branches/trigger_burst_research/Monitoring/CheckNetworkStatus.sh	(revision 18288)
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+numnetworks=6
+
+vals=(`/sbin/ifconfig -s | tr -s ' ' | cut -d' ' -f1,5,9 | grep -v RX `)
+#echo ${vals[@]}
+#echo ${#vals[@]}
+
+numvals=`echo " $numnetworks * 3 + 3" | bc -l`
+#echo $numvals
+if [ ${#vals[@]} -ne $numvals ]
+then 
+   echo "ERROR Not all "$numnetworks" networks available."
+   exit
+fi
+
+for (( i=0 ; i < $numnetworks ; i++ )) 
+do 
+   query="SELECT fRXErrors, fTXErrors FROM NetworkErrors WHERE fEth="$i" ORDER BY fTime DESC LIMIT 0,1;" 
+   #echo $query
+   errors=( `/usr/bin/mysql -u systemstatus --host=fact01 systemstatus -s -e "$query"` )
+   #echo ${errors[@]}
+   if [ ${errors[0]} -lt ${vals[$i*3+1]} ]
+   then 
+      echo "RX errors increased to "${errors[0]}" on eth"$i
+   fi
+   if [ ${errors[1]} -lt ${vals[$i*3+2]} ]
+   then
+      echo "TX errors increased to "${errors[1]}" on eth"$i
+   fi
+   query="INSERT NetworkErrors SET fEth="$i", fRXErrors="${vals[$i*3+1]}", fTXErrors="${vals[$i*3+2]}", fTime=Now()"
+   /usr/bin/mysql -u systemstatus --host=fact01 systemstatus -e "$query"
+done
+
Index: branches/trigger_burst_research/Monitoring/CheckStatus.sh
===================================================================
--- branches/trigger_burst_research/Monitoring/CheckStatus.sh	(revision 18288)
+++ branches/trigger_burst_research/Monitoring/CheckStatus.sh	(revision 18288)
@@ -0,0 +1,77 @@
+#!/bin/bash
+#
+# This script checks the status tables in the DB for failed and crashed jobs
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+tables=( RawFileAvailISDC RawFileAvailWue RawFileRsyncedISDC SequenceFileAvailISDC AuxFilesAvailISDC DriveFileAvailISDC Callisto Star )
+
+for step in ${tables[@]}
+do
+   getstepinfo
+   counter=0
+   for prim in ${prims[@]}
+   do
+      if [ $counter -eq 0 ]
+      then
+         selstart=" CONCAT("$prim
+      fi
+      if [ $counter -gt 0 ]
+      then
+         selstart=$selstart", '_', LPAD("$prim", 3, 0)"
+      fi
+      counter=`echo $counter + 1 | bc -l`
+      #if [ $counter -eq ${#prims[@]} ]
+      #then
+      #   sel=$sel")"
+      #fi
+   done
+   
+   # check table for failed jobs
+   printprocesslog "INFO checking "$step" for failed jobs."
+   where=" WHERE NOT ISNULL(fReturnCode) "
+   where=$where" AND fStopTime > ADDDATE(NOW(), INTERVAL -25 HOUR)"              
+   query="SELECT Count(*) FROM "$step"Status "$where
+   num=`sendquery`
+   if [ "$num" == "" ]
+   then 
+      printprocesslog "WARN could not get number of failed jobs from the DB."
+   else
+      if [ $num -gt 0 ]
+      then
+         printprocesslog "WARN found in "$step" "$num" rows with errors."
+         sel=$selstart", '(', fReturnCode, ')')"
+         query="SELECT "$sel" FROM "$step"Status "$where
+         sendquery | mail -s 'found failed jobs in '$step $erradrs
+         printprocesslog "INFO sent mail about failed jobs in "$step" to "$erradrs
+      fi
+   fi
+
+   # check table for crashed jobs
+   #   i.e. jobs which are running > 2 hours
+   # check the last 27 hours
+   printprocesslog "INFO checking "$step" for crashed jobs."
+   where=" WHERE NOT ISNULL(fStartTime) AND ISNULL(fStopTime) "
+   where=$where" AND fStartTime < ADDDATE(NOW(), INTERVAL -2 HOUR)"
+   where=$where" AND fStartTime > ADDDATE(NOW(), INTERVAL -27 HOUR)"
+   query="SELECT Count(*) FROM "$step"Status "$where
+   num=`sendquery`
+   if [ "$num" == "" ]
+   then 
+      printprocesslog "WARN could not get number of crashed jobs from the DB."
+   else
+      if [ $num -gt 0 ]
+      then
+         printprocesslog "WARN found in "$step" "$num" crashed jobs."
+         sel=$selstart", '(', fStartTime, ')')"
+         query="SELECT "$sel" FROM "$step"Status "$where
+         sendquery | mail -s 'found crashed jobs in '$step $erradrs
+         printprocesslog "INFO sent mail about crashed jobs in "$step" to "$erradrs
+      fi
+   fi
+done
+
+printprocesslog "INFO finished $0"
+
Index: branches/trigger_burst_research/Processing/BuildSequences.sh
===================================================================
--- branches/trigger_burst_research/Processing/BuildSequences.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/BuildSequences.sh	(revision 18288)
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=BuildSequences
+
+# removed, as this is taken care of in crontab
+#set -C
+## check if script is already running
+#lockfile=$lockpath/lock-$program.txt
+#checklock 
+
+logfile=$runlogpath"/BuildSequences-"$datetime".log"
+date >> $logfile
+
+cd $mars
+
+check1=`root -q -b fact/processing/buildseqentries.C+\("\"$datapath\""\,"\"$seqpath\""\,1\,kFALSE\) | tee $logfile | intgrep`
+
+case $check1 in
+   1)   printprocesslog "INFO built sequences was successfully (check1=$check1)"
+        ;;
+   0)   printprocesslog "WARN connection to DB failed (check1=$check1)"
+        #check="no"
+        ;;
+   *)   printprocesslog "ERROR buildseqentries.C failed (check1=$check1)"
+        #check=$check1
+        ;;
+esac
+
+finish 
+
Index: branches/trigger_burst_research/Processing/CheckAuxFilesAvail.sh
===================================================================
--- branches/trigger_burst_research/Processing/CheckAuxFilesAvail.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/CheckAuxFilesAvail.sh	(revision 18288)
@@ -0,0 +1,68 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=CheckAuxFilesAvail
+step=AuxFilesAvailISDC
+transferdelay=3 #days
+
+set -C
+
+# check if script is already running
+lockfile=$lockpath/lock-$program.txt
+checklock 
+
+# get todo list
+gettodo 
+
+
+for (( s=0 ; s < $num ; s++ ))
+do
+   night=${primaries[$s]}
+   currentnight=`date +%Y%m%d`
+   #checknight=`echo " $currentnight - $transferdelay " | bc -l`
+   checknight=`date +%Y%m%d --date="-${transferdelay}day"`
+   
+   setstatus "start" 
+
+   auxpath=$auxdata"/"`echo $night | cut -c 1-4`/`echo $night | cut -c 5-6`/`echo $night | cut -c 7-8`
+   drivefile=$auxpath/$night".DRIVE_CONTROL_TRACKING_POSITION.fits"
+   weatherfile=$auxpath/$night".MAGIC_WEATHER_DATA.fits"
+   ratesfile=$auxpath/$night".FTM_CONTROL_TRIGGER_RATES.fits"
+   temperaturefile=$auxpath/$night".FSC_CONTROL_TEMPERATURE.fits"
+   humidityfile=$auxpath/$night".FSC_CONTROL_HUMIDITY.fits"
+
+   test -e $drivefile 
+   check1=$?
+   test -e $weatherfile 
+   check2=$?
+   test -e $ratesfile 
+   check3=$?
+   test -e $temperaturefile 
+   check4=$?
+   test -e $humidityfile 
+   check5=$?
+   
+   totalcheck=`echo $check1 + $check2 + $check3 + $check4 + $check5 | bc -l`
+   totalcheck2=${check1}${check2}${check3}${check4}${check5}
+
+   case $totalcheck in
+      0)   printprocesslog "INFO found auxfiles for night "$night"."
+           ;;
+      *)   if [ $night -lt $checknight ]
+           then 
+              printprocesslog "WARN "$totalcheck" auxiliary files missing ("$totalcheck2") for night "$night"."
+              check=$totalcheck2
+           else
+              check="no"
+              printprocesslog "INFO "$totalcheck" auxiliary files missing ("$totalcheck2") for night "$night"."
+           fi
+           ;;
+   esac
+
+   setstatus "stop" 
+done
+
+finish 
+
Index: branches/trigger_burst_research/Processing/CheckDriveFileAvail.sh
===================================================================
--- branches/trigger_burst_research/Processing/CheckDriveFileAvail.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/CheckDriveFileAvail.sh	(revision 18288)
@@ -0,0 +1,52 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=CheckDriveFileAvail
+step=DriveFileAvailISDC
+transferdelay=3 #days
+
+set -C
+
+# check if script is already running
+lockfile=$lockpath/lock-$program.txt
+checklock 
+
+# get todo list
+gettodo 
+
+
+for (( s=0 ; s < $num ; s++ ))
+do
+   night=${primaries[$s]}
+   currentnight=`date +%Y%m%d`
+   #checknight=`echo " $currentnight - $transferdelay " | bc -l`
+   checknight=`date +%Y%m%d --date="-${transferdelay}day"`
+   
+   setstatus "start" 
+
+   auxpath=$auxdata"/"`echo $night | cut -c 1-4`/`echo $night | cut -c 5-6`/`echo $night | cut -c 7-8`
+   drivefile=$auxpath/$night".DRIVE_CONTROL_TRACKING_POSITION.fits"
+
+   test -e $drivefile 
+   check1=$?
+   case $check1 in
+      0)   printprocesslog "INFO found "$drivefile
+           ;;
+      *)   if [ $night -lt $checknight ]
+           then 
+              printprocesslog "WARN "$drivefile" missing (check1="$check1")."
+              check=$check1
+           else
+              check="no"
+              printprocesslog "INFO "$drivefile" missing (check1="$check1")."
+           fi
+           ;;
+   esac
+
+   setstatus "stop" 
+done
+
+finish 
+
Index: branches/trigger_burst_research/Processing/CheckNewSetup.sh
===================================================================
--- branches/trigger_burst_research/Processing/CheckNewSetup.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/CheckNewSetup.sh	(revision 18288)
@@ -0,0 +1,56 @@
+#!/bin/bash
+#
+
+path=/home_nfs/isdc/fact_opr
+oldMars=$path/Mars.von.Thomas.2012.06.19
+newMars=$path/Mars.von.Thomas.2012.06.22
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+echo "MARS:"
+echo "  old:  "$oldMars
+echo "  new:  "$newMars
+echo "  set:  "$mars
+echo ""
+echo "SETUP:"
+echo "  out:  "$datapath
+echo "  ROOT: "$ROOTSYS
+echo ""
+echo ""
+echo ""
+echo " OK? (y/n) "
+
+answer=`head -n 1`
+#echo "answer: "$answer
+
+case $answer in
+   y) echo "continue checking files"
+      echo "=== === === === === === === === === === === === === === === === === === === === === === === === === === === === === ==="
+      sleep 1
+      echo ""
+      echo ""
+      ;;
+   n) echo "exit"
+      exit
+      ;;
+   *) echo "invalid answer "$answer
+      echo "exit"
+      exit
+      ;;
+esac
+
+list=( fact/callisto6.C fact/star.C fact/merpp3.C fact/camtemp.C fact/ctrldev.C
+       fact/magictemp.C fact/fillcalib.C fact/calibrate.C fact/curavg.C \
+       datacenter/macros/buildseqentriesf.C )
+
+
+for tool in ${list[@]}
+do 
+   echo "===> checking "$tool
+   diff $oldMars"/"$tool $newMars"/"$tool
+   echo ""
+   echo ""
+   echo ""
+done
+
Index: branches/trigger_burst_research/Processing/CheckRatesFileAvail.sh
===================================================================
--- branches/trigger_burst_research/Processing/CheckRatesFileAvail.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/CheckRatesFileAvail.sh	(revision 18288)
@@ -0,0 +1,52 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=CheckRatesFileAvail
+step=RatesFileAvailISDC
+transferdelay=3 #days
+
+set -C
+
+# check if script is already running
+lockfile=$lockpath/lock-$program.txt
+checklock 
+
+# get todo list
+gettodo 
+
+
+for (( s=0 ; s < $num ; s++ ))
+do
+   night=${primaries[$s]}
+   currentnight=`date +%Y%m%d`
+   #checknight=`echo " $currentnight - $transferdelay " | bc -l`
+   checknight=`date +%Y%m%d --date="-${transferdelay}day"`
+   
+   setstatus "start" 
+
+   auxpath=$auxdata"/"`echo $night | cut -c 1-4`/`echo $night | cut -c 5-6`/`echo $night | cut -c 7-8`
+   ratesfile=$auxpath/$night".FTM_CONTROL_TRIGGER_RATES.fits"
+
+   test -e $ratesfile 
+   check1=$?
+   case $check1 in
+      0)   printprocesslog "INFO found "$ratesfile
+           ;;
+      *)   if [ $night -lt $checknight ]
+           then 
+              printprocesslog "WARN "$ratesfile" missing (check1="$check1")."
+              check=$check1
+           else
+              check="no"
+              printprocesslog "INFO "$ratesfile" missing (check1="$check1")."
+           fi
+           ;;
+   esac
+
+   setstatus "stop" 
+done
+
+finish 
+
Index: branches/trigger_burst_research/Processing/CheckRawData.sh
===================================================================
--- branches/trigger_burst_research/Processing/CheckRawData.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/CheckRawData.sh	(revision 18288)
@@ -0,0 +1,401 @@
+#!/bin/bash
+
+# completely new version of the script 
+# running on newdaq to fill the information as fast as possible
+# ftools is not used in this script anymore
+
+# todo
+# tempfile for fitsdump -h output
+
+# option whether to fill all row or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with options doupdate="$doupdate #" and skipmd5sum="$skipmd5sum
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# check if paths are available
+if ! ls $rawdata >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$rawdata" is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights if hour between 7 and 19h, else only current night
+   getdates 6 7 19
+fi
+
+# do check for rawfiles of these dates
+for date in ${dates[@]}
+do 
+   # get runnumber from date
+   runnumber=`echo $date | sed -e 's/\///g'`
+   
+   night=
+   query="SELECT fNight FROM AuxDataInsertStatus WHERE fNight="$runnumber
+   night=`sendquery`
+   if [ "$night" == "" ] && ls $rawdata/$date >/dev/null 2>&1
+   then
+      printprocesslog "INFO insert "$runnumber" to AuxDataInsertStatus"
+      query="INSERT AuxDataInsertStatus SET fNight="$runnumber", fPriority="$runnumber
+      sendquery >/dev/null 2>&1
+   fi
+   rawdir=$rawdata/$date
+   # check if data are available from that night
+   if ! [ -d $rawdir ]
+   then
+      printprocesslog "INFO "$rawdir" does not exist."
+      continue
+   else
+      printprocesslog "INFO processing "$rawdir"..."
+   fi
+
+   # find all fits.gz files starting with the oldest file
+   printprocesslog "INFO finding files to be checked in $rawdir..."
+   #fitsfiles=`ls $rawdir/*.fits 2>/dev/null | sort `
+   fitsfiles=( `find $rawdir -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort ` )
+
+   # loop to check files
+   for file in ${fitsfiles[@]}
+   do
+      printprocesslog "INFO checking file "$file
+      #echo "INFO checking file "$file
+      filenotclosed="no"
+
+      # check if it is drs file
+      isdrs=`ls $file | grep drs`
+      if [ "$isdrs" != "" ]
+      then
+         printprocesslog "INFO "$file" is a drs file. -> continue"
+         continue
+      fi
+
+      # get run and file number form filename
+      numberfromname=`echo $file | grep -E -o '20[1-9][0-9][01][0-9][0-3][0-9]_[0-9]{3}'`
+      runnumberfromname=`echo $numberfromname | cut -d_ -f1`
+      filenumberfromname=`echo $numberfromname | cut -d_ -f2 | sed -e 's/^0//g' -e 's/^0//g'`
+      if [ "$runnumber" != "$runnumberfromname" ]
+      then
+         printprocesslog "ERROR for file "$file": runnumber from date ("$runnumber") and filename ("$runnumberfromname") don't agree."
+      fi
+      
+      # check if entry already exists
+      query="SELECT fNight FROM RunInfo WHERE Concat(fNight, '_', LPAD(fRunID, 3, 0))='"$numberfromname"'"
+      printprocesslog "DEBUG check if entry already exists in DB. QUERY: "$query
+      #result3=`/usr/bin/mysql -u operator --host=fact01.fact.local --password=$password FactData -e "$query3"`
+      result3=`sendquery`
+
+      # only proceed with file 
+      #  if information is not yet in database
+      #  and no update is wished ($doupdate)
+      if [ "$result3" != "" ] && [ "$doupdate" == "no" ]
+      then 
+         printprocesslog "INFO "$file" has been inserted already. -> continue "
+         continue
+      fi
+      
+      runtype=
+      # check if file was closed properly
+      if [ "`echo $file | grep -o drs`" == "drs" ]
+      then
+         nondrsfile=`echo $file | sed -e 's/[.]drs//g'`".fz" # fixme: maybe more flexible
+         tstop=`$factpath/fitsdump -h $nondrsfile  2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
+         checkstatus=$?
+      else
+         tstop=`$factpath/fitsdump -h $file  2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
+         checkstatus=$?
+      fi
+      #if [ $checkstatus -gt 0 ]
+      #then
+      #   echo "WARN: getting tstop for file "$file" failed: tstop: -"$tstop"-"`date`
+      #fi
+      if [ "$tstop" == "0" ] || [ "$tstop" == "" ]
+      then
+         fileaccessed=`find $file -amin -30`
+         if ! [ "$fileaccessed" == "" ]
+         then
+            printprocesslog "INFO "$file" was accessed in the last 30 minutes => continue"
+            continue
+         else
+            printprocesslog "WARN "$file" has empty TSTOP but was not touched for 30 minutes"
+            filenotclosed="yes"
+         fi
+      fi
+
+      numdrsfiles=
+      step=
+      if [ "$filenotclosed" == "no" ]
+      then 
+         # get run and file number from file
+         runnumberfromfile=`$factpath/fitsdump -h $file 2>/dev/null | grep NIGHT | grep -E -o '20[1-9][0-9][01][0-9][0-3][0-9]'`
+         filenumberfromfileorig=`$factpath/fitsdump -h $file 2>/dev/null | grep RUNID | grep -E -o '[0-9]{1,3}'`
+         if [ "$runnumberfromfile" = "" ] || [ "$filenumberfromfileorig" = "" ]
+         then
+            printprocesslog "ERROR couldn't get run or file number from file name ("$file")."
+         fi
+         numberfromfile=$runnumberfromfile"_"`printf %03d $filenumberfromfileorig`
+         # compare numbers
+         if [ "$numberfromfile" != "$numberfromname" ]
+         then
+            printprocesslog "ERROR for file "$file": number from filename ("$numberfromname") and file ("$numberfromfile") don't agree."
+         fi
+      
+         # get checksums from header
+         checksum=`$factpath/fitsdump -h $file  2>/dev/null | grep CHECKSUM | grep -E -o '[a-zA-Z0-9]{16}'`
+         if [ "$checksum" == "" ]
+         then 
+            printprocesslog "WARN checksum for file "$file" is empty."
+         fi
+         datasum=`$factpath/fitsdump -h $file  2>/dev/null | grep DATASUM | grep -E -o '[0-9]{1,10}'`
+         if [ "$datasum" == "" ]
+         then 
+            printprocesslog "WARN datasum for file "$file" is empty."
+         fi
+         
+         # check if this run has drs file
+         #   in case file is available, get STEP from header
+         # in the very beginning only drs-files were existing
+         # in the beginning the keywords DRSCALIB and STEP were not existing
+         drsfile=`echo $file | sed -e 's/fits.fz/drs.fits/'`
+         numdrsfiles=`ls $drsfile 2>/dev/null | wc -l`
+         drscalib=`$factpath/fitsdump -h $file  2>/dev/null | grep DRSCALIB | grep -E -o "[\ ][TF][\ ]" | sed -e "s/\ //g"`
+         if [ "$drscalib" == "T" ]
+         then 
+            step=`$factpath/fitsdump -h $file  2>/dev/null | grep DRSSTEP | grep -E -o "[\ ][012][\ ]" | sed -e "s/\ //g"`
+            stepfromdrs=`$factpath/fitsdump -h $drsfile  2>/dev/null | grep STEP | grep -E -o "[\ ][012][\ ]?" | sed -e "s/\ //g"`
+            if [ "$stepfromdrs" != "$step" ]
+            then
+               printprocesslog "ERROR for file "$file" step from drsfile ("$stepfromdrs") and from file ("$step") do not agree."
+               if [ "$stepfromdrs" != "" ] && [ "$step" == "" ]
+               then 
+                  step=$stepfromdrs
+                  printprocesslog "WARN setting drsstep from drsfile ("$stepfromdrs") although value differs from the one in file "$file"."
+               fi
+            fi
+            if ! [ $numdrsfiles -eq 1 ]
+            then 
+               printprocesslog "ERROR for file "$file" number of drsfiles ("$numdrsfiles") and information from header ("$drscalib") don't agree."
+            fi
+            if [ "$step" = "" ]
+            then 
+               printprocesslog "ERROR file "$file" has drsfiles ("$numdrsfiles"), but step ("$step") is empty."
+            fi
+         else
+            if ! [ "$drscalib" == "F" ]
+            then
+               printprocesslog "WARN for file "$file" DRSCALIB is neither T nor F."
+            fi
+         fi
+         
+         # get other variables from header 
+         runtype=`$factpath/fitsdump -h $file  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z0-9._-]+[']" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
+         roi=`$factpath/fitsdump -h $file  2>/dev/null | grep NROI | grep -v NROITM | grep -E -o '[0-9]{1,4}'`
+         roitm=`$factpath/fitsdump -h $file  2>/dev/null | grep NROITM | grep -E -o '[0-9]{1,4}'`
+         numevents=`$factpath/fitsdump -h $file  2>/dev/null | grep Events | grep -E -o '[0-9]+'`
+         numphys=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRG ' | grep -E -o '[0-9]+'`
+         numext1=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRGEXT1' | grep -E -o '[ ][0-9]+[ ]' | sed -e 's/\ //g'`
+         numext2=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRGEXT2' | grep -E -o '[ ][0-9]+[ ]' | sed -e 's/\ //g'`
+         numelp=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRGLPE' | grep -E -o '[0-9]+'`
+         numilp=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRGLPI' | grep -E -o '[0-9]+'`
+         numoth=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRGMISC' | grep -E -o '[0-9]+'`
+         numped=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRGPED' | grep -E -o '[0-9]+'`
+         numtime=`$factpath/fitsdump -h $file  2>/dev/null | grep 'NTRGTIM' | grep -E -o '[0-9]+'`
+         compiled=`$factpath/fitsdump -h $file  2>/dev/null | grep 'COMPILED' | grep -E -o "['][a-zA-Z]+[ ][ 12][0-9][ ]20[0-9][0-9][ ][0-2][0-9]:[0-5][0-9]:[0-5][0-9][']" | sed -e "s/'//g"`
+         if ! [ "$compiled" == "" ]
+         then 
+            compiletime=`date +'%F %H:%M:%S' --date="${compiled}" `
+         else
+            compiletime=
+         fi
+         revnum=`$factpath/fitsdump -h $file  2>/dev/null | grep 'REVISION' | grep -E -o "['][0-9]+[:]?[0-9]*[MSP]*[']" | sed -e "s/'//g"`
+         # in newest data start time is in DATE-OBS
+         # in older data start time is in TSTART
+         # in the beginning TSTART was empty
+         runstart=`$factpath/fitsdump -h $file  2>/dev/null | grep DATE-OBS | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         runstart2=`$factpath/fitsdump -h $file  2>/dev/null | grep TSTART | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         if [ "$runstart" == ""  ]
+         then
+            if [ "$runstart2" == ""  ]
+            then
+               # for very old data, but gives problems for newer data
+               #runstart=`$factpath/fitsdump -h $file  2>/dev/null | grep DATE | grep -v 'DATE-' | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+               runstart="0000-00-00 00:00:00"
+            else
+               runstart=$runstart2
+            fi
+         fi
+         # in newest data start time is in DATE-END
+         # in older data start time is in TSTOP
+         # in the beginning TSTOP was empty
+         runstop=`$factpath/fitsdump -h $file  2>/dev/null | grep DATE-END | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         runstop2=`$factpath/fitsdump -h $file  2>/dev/null | grep TSTOP | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         if [ "$runstop" == ""  ]
+         then
+            if [ "$runstop2" == ""  ]
+            then
+               # for very old data, but gives problems for newer data
+               #runstop=`stat $file  2>/dev/null | grep Modify | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9][ ][0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{9}'`
+               runstop="0000-00-00 00:00:00"
+            else
+               runstop=$runstop2
+            fi
+         fi
+      fi
+      
+      # set runtype to 'unknown', if no runtype could be retrieved from file
+      if [ "$runtype" == "" ]
+      then
+         runtype="n/a"
+      fi
+      # on 15.11.2011 the runtypes had different names
+      if [ "$date" == "2011/11/15" ]
+      then
+         if [ "$runtype" == "drs-calib" ]
+         then
+            runtype="drs-gain"
+         fi
+         if [ "$runtype" == "drs-time-calib" ]
+         then
+            runtype="drs-time"
+         fi
+         if [ "$runtype" == "pedestal" ]
+         then
+            runtype="drs-pedestal"
+         fi
+         if [ "$runtype" == "light-pulser" ]
+         then
+            runtype="light-pulser-ext"
+         fi
+         if [ "$runtype" == "pedestal-on" ]
+         then
+            runtype="pedestal"
+         fi
+      fi
+      # get runtype
+      query="SELECT fRunTypeKEY FROM RunType WHERE fRunTypeName='"$runtype"'"
+      printprocesslog "DEBUG get run type from DB. QUERY:"$query
+      result2=( `sendquery` )
+      if [ ${#result2} -eq 0 ]
+      then 
+         printprocesslog "ERROR "$numberfromname": Could not query fRunTypeKey for runtype "$runtype" ."
+         continue
+      fi
+
+      # insert or update depending on whether run exists
+      if [ "$result3" == "" ]
+      then 
+         query="INSERT"
+         querymid=" fNight="$runnumber", fRunID="$filenumberfromname", "
+         querystop=
+      else
+         query="UPDATE"
+         querymid=
+         querystop=" WHERE fNight="$runnumber" AND fRunID="$filenumberfromname
+      fi
+      query=$query" RunInfo SET "$querymid" fRunTypeKey="${result2[0]}
+      if [ "$filenotclosed" == "no" ]
+      then 
+         query=$query", fRunStart='"$runstart"', fRunStop='"$runstop"'"
+         if [ "$numevents" != "" ]
+         then
+            query=$query", fNumEvents="$numevents
+         fi
+         if [ "$roi" != "" ]
+         then
+            query=$query", fROI="$roi
+         fi
+         if [ "$roitm" != "" ]
+         then
+            query=$query", fROITimeMarker="$roitm
+         fi
+         if [ "$numphys" != "" ]
+         then
+            query=$query", fNumPhysicsTrigger="$numphys
+         fi
+         if [ "$numext1" != "" ]
+         then
+            query=$query", fNumExt1Trigger="$numext1
+         fi
+         if [ "$numext2" != "" ]
+         then
+            query=$query", fNumExt2Trigger="$numext2
+         fi
+         if [ "$numelp" != "" ]
+         then
+            query=$query", fNumELPTrigger="$numelp
+         fi
+         if [ "$numilp" != "" ]
+         then
+            query=$query", fNumILPTrigger="$numilp
+         fi
+         if [ "$numped" != "" ]
+         then
+            query=$query", fNumPedestalTrigger="$numped
+         fi
+         if [ "$numtime" != "" ]
+         then
+            query=$query", fNumTimeTrigger="$numtime
+         fi
+         if [ "$numoth" != "" ]
+         then
+            query=$query", fNumOtherTrigger="$numoth
+         fi
+      fi
+      if [ "$checksum" != "" ]
+      then
+         query=$query", fCheckSum='"$checksum"'"
+      fi
+      if [ "$datasum" != "" ]
+      then
+         query=$query", fDataSum='"$datasum"'"
+      fi
+      if [ "$numdrsfiles" != "" ]
+      then
+         query=$query", fHasDrsFile="$numdrsfiles
+      fi
+      if [ "$step" != "" ]
+      then
+         query=$query", fDrsStep="$step
+      fi
+      if [ "$compiletime" != "" ]
+      then
+         query=$query", fCompileTime='"$compiletime"'"
+      fi
+      if [ "$revnum" != "" ]
+      then
+         query=$query", fRevisionNumber='"$revnum"'"
+      fi
+      #query=$query", fFitsFileErrors="$numfitserrors
+      query=$query" "$querystop
+      # send query to DB
+      #echo $query
+      sendquery >/dev/null
+   done
+done
+
+finish
+
Index: branches/trigger_burst_research/Processing/CheckRawFilesAvail.sh
===================================================================
--- branches/trigger_burst_research/Processing/CheckRawFilesAvail.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/CheckRawFilesAvail.sh	(revision 18288)
@@ -0,0 +1,99 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=CheckRawFilesAvail
+step=RawFileAvailISDC
+transferdelay=3 #days
+failsdir=/archive/fact/fails/raw
+
+set -C
+
+# check if script is already running
+lockfile=$lockpath/lock-$program.txt
+checklock 
+
+# get todo list
+gettodo 
+
+
+for (( s=0 ; s < $num ; s++ ))
+do
+   night=${primaries[$s+$s]}
+   runid=${primaries[$s+$s+1]}
+   currentnight=`date +%Y%m%d`
+   checknight=`echo " $currentnight - $transferdelay " | bc -l`
+   
+   setstatus "start" 
+
+   night2=`echo $night | cut -c 1-4`/`echo $night | cut -c 5-6`/`echo $night | cut -c 7-8`
+   rawfile=$rawdata/$night2/$night"_"`printf %03d $runid`".fits.fz"
+   failsfile=$failsdir/$night2/$night"_"`printf %03d $runid`".fits.fz"
+   
+   test -e $rawfile 
+   check1=$?
+
+   query="SELECT fHasDrsFile from RunInfo WHERE fNight="$night" AND fRunID="$runid
+   numdrs=`sendquery`
+   if [ $numdrs -gt 0 ]
+   then 
+      drsfile=$rawdata/$night2"/"$night"_"`printf %03d $runid`".drs.fits.gz"
+      failsdrsfile=$failsdir/$night2"/"$night"_"`printf %03d $runid`".drs.fits.gz"
+      test -e $drsfile
+      check3=$?
+   fi
+   
+   case $check1 in
+      0)   printprocesslog "INFO found rawfile "$rawfile
+           if [ $numdrs -gt 0 ]
+           then 
+              case $check3 in 
+                 0) printprocesslog "INFO found drsfile "$drsfile
+                    ;;
+                 *) test -e $failsdrsfile
+                    check4=$?
+                    case $check4 in
+                       0)   printprocesslog "INFO found rawfile in "$failsdrsfile
+                            # drs file is not in archive for files which are ok
+                            check=$check4
+                            ;;
+                       *)   # print warning only for files which are older than $transferdelay days
+                            if [ $night -lt $checknight ]
+                            then 
+                               printprocesslog "WARN "$drsfile" and "$failsdrsfile" missing."
+                            else
+                               printprocesslog "INFO "$drsfile" and "$failsdrsfile" missing."
+                            fi
+                            check="no"
+                            ;;
+                    esac
+              esac
+           fi
+           ;;
+      *)   test -e $failsfile
+           check2=$?
+           
+           case $check2 in 
+              0)   printprocesslog "INFO found rawfile in "$failsfile
+                   # raw file is not in archive for files which are ok
+                   check=$check2
+                   ;;
+              *)   # print warning only for files which are older than $transferdelay days
+                   if [ $night -lt $checknight ]
+                   then 
+                      printprocesslog "WARN "$rawfile" and "$failsfile" missing."
+                   else
+                      printprocesslog "INFO "$rawfile" and "$failsfile" missing."
+                   fi
+                   check="no"
+                   ;;
+           esac
+           ;;
+   esac
+
+   setstatus "stop" 
+done
+
+finish 
+
Index: branches/trigger_burst_research/Processing/FillAuxCamHum.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillAuxCamHum.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillAuxCamHum.sh	(revision 18288)
@@ -0,0 +1,179 @@
+#!/bin/bash
+
+# option whether to fill all rows or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillAusCamHum-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all nights
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+#echo ${dates[@]}
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+   if [ $runnumber -lt 20120328 ]
+   then
+      continue
+   fi
+   #echo $auxdir" @ "`date` 
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fCamHumidityMean) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   fschumfile=$auxdir/$runnumber.FSC_CONTROL_HUMIDITY.fits
+   if ! [ -e $fschumfile ]
+   then 
+      printprocesslog "WARN "$fschumfile" not found."
+      #echo "WARN "$fschumfile" not found."
+   else
+      humnumerrors=`fverify $fschumfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $humnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for "$fschumfile" fverify returned "$humnumerrors" error(s)."
+      fi
+   fi
+
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      #checkfitsfile=`fverify $rawfile  2>/dev/null | grep '0 error(s)'`
+      #if [ "$checkfitsfile" == "" ]
+      #then
+      #   numfitserrors=1
+      #   printprocesslog "WARN: "$rawfile" probably corrupted."
+      #   continue
+      #fi
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # get information from fsc: T[31]
+      if [ -e $fschumfile ] && [ $humnumerrors -eq 0 ]
+      then 
+         hums=( `root -q -b -l fact/processing/camhum.C\("\"$fschumfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         if [ "${hums[0]}" == "" ]
+         then 
+            query=$query"fCamHumidityMean=NULL"
+         else
+            query=$query"fCamHumidityMean="${hums[0]}
+         fi
+      else
+         query=$query" fCamHumidityMean=NULL"
+      fi
+      #fCameraTempMeanRms: mean of rms of single sensors
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      
+      #echo $query
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillAuxContTemp.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillAuxContTemp.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillAuxContTemp.sh	(revision 18288)
@@ -0,0 +1,171 @@
+#!/bin/bash
+
+# option whether to fill all rows or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillContTemp-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all nights
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+   # the container temperature was not available before this date
+   if [ $runnumber -lt 20130413 ]
+   then
+      printprocesslog "INFO container temperature was not available before 20130413 "
+      continue
+   fi
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fContainerTempMean) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+
+   # get daily fits files 
+   conttempfile=$auxdir/$runnumber.TEMPERATURE_DATA.fits
+   if ! [ -e $conttempfile ]
+   then 
+      printprocesslog "WARN "$conttempfile" not found."
+      continue
+   else
+      conttempnumerrors=`fverify $conttempfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $conttempnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $conttempfile fverify returned "$conttempnumerrors" error(s)."
+      fi
+   fi
+   
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+      #echo "run "$filenum" "$tstart" "$tstop
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # get ctrl dev
+      if [ -e $conttempfile ] && [ $conttempnumerrors -eq 0 ]
+      then 
+         conttemps=( `root -q -b -l fact/processing/contemp.C\("\"$conttempfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         #echo " "${conttemps[@]}
+         if [ "${conttemps[0]}" == "" ]
+         then 
+            query=$query" fContainerTempMean=NULL "
+         else
+            query=$query" fContainerTempMean="${conttemps[0]}
+         fi
+      else
+         query=$query" fContainerTempMean=NULL"
+      fi
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillAuxCtrDev.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillAuxCtrDev.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillAuxCtrDev.sh	(revision 18288)
@@ -0,0 +1,175 @@
+#!/bin/bash
+
+# option whether to fill all rows or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillCtrlDev-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all nights
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+   if [ $runnumber -lt 20111115 ]
+   then
+      continue
+   fi
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fCtrlDevMean) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   # get daily fits files 
+   trackingfile=$auxdir/$runnumber.DRIVE_CONTROL_TRACKING_POSITION.fits
+   if ! [ -e $trackingfile ]
+   then 
+      printprocesslog "WARN "$trackingfile" not found."
+      #echo "WARN "$trackingfile" not found."
+      continue
+   else
+      tracknumerrors=`fverify $trackingfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $tracknumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $trackingfile fverify returned "$tracknumerrors" error(s)."
+      fi
+   fi
+   
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # get ctrl dev
+      if [ -e $trackingfile ] && [ $tracknumerrors -eq 0 ]
+      then 
+         ctrldevs=( `root -q -b -l fact/processing/ctrldev.C\("\"$trackingfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         if [ "${ctrldevs[0]}" == "" ]
+         then 
+            query=$query"fCtrlDevMean=NULL"
+         else
+            query=$query"fCtrlDevMean="${ctrldevs[0]}
+         fi
+         if [ "${ctrldevs[1]}" == "" ]
+         then 
+            query=$query", fCtrlDevRms=NULL"
+         else
+            query=$query", fCtrlDevRms="${ctrldevs[1]}
+         fi
+      else
+         query=$query" fCtrlDevMean=NULL"
+         query=$query", fCtrlDevRms=NULL"
+      fi
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillAuxCurrents.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillAuxCurrents.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillAuxCurrents.sh	(revision 18288)
@@ -0,0 +1,287 @@
+#!/bin/bash
+
+# FillAuxCurrents.sh
+# Fills the per-run current statistics (the fCurrents* columns of the RunInfo
+# table) from the calibrated-currents aux files of the last nights into the DB.
+# Helper functions used below (printprocesslog, getdates, sendquery, finish)
+# and path variables ($runlogpath, $factpath, $auxdata, $mars, ...) are not
+# defined here; they are expected from Sourcefile.sh (sourced below).
+
+# option whether to fill all row or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   # the second assignment wins; comment it out to update all entries
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillCurrents-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+# if $certaindate is set (environment), process only that night after
+# checking it has the format YYYY/MM/DD; otherwise the last 6 nights
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   # night in YYYYMMDD format (slashes stripped from YYYY/MM/DD)
+   runnumber=`echo $date | sed -e 's/\///g'`
+   
+   #if [ $runnumber -lt 20130301 ]
+   #then
+   #   continue
+   #fi
+   
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fCurrentsMedMean) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   # (note: ${#filenumbers} is the string length of the first element, not the
+   # element count, but it is 0 exactly when the query returned nothing)
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+
+   # the following aux files are required for the night: if one is missing the
+   # whole night is skipped; fverify errors are only logged, not fatal.
+   # NOTE(review): the $...numerrors variables are used unquoted in the
+   # numeric tests below — if fverify is unavailable the grep pipeline yields
+   # an empty string and '[ -gt 0 ]' errors out; presumably fverify is
+   # guaranteed by the HEADAS setup above — TODO confirm
+   biasvoltagefile=$auxdir/$runnumber.BIAS_CONTROL_VOLTAGE.fits
+   if ! [ -e $biasvoltagefile ]
+   then 
+      printprocesslog "WARN "$biasvoltagefile" not found."
+      continue
+   else
+      biasnumerrors=`fverify $biasvoltagefile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $biasnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $biasvoltagefile fverify returned "$biasnumerrors" error(s)."
+      fi
+   fi
+
+   biascurrentfile=$auxdir/$runnumber.BIAS_CONTROL_CURRENT.fits
+   if ! [ -e $biascurrentfile ]
+   then 
+      printprocesslog "WARN "$biascurrentfile" not found."
+      continue
+   else
+      biascurrnumerrors=`fverify $biascurrentfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $biascurrnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $biascurrentfile fverify returned "$biascurrnumerrors" error(s)."
+      fi
+   fi
+
+   # this file is needed for the calibration of the currents
+   feedbackcalfile=$auxdir/$runnumber.FEEDBACK_CALIBRATION.fits
+   if ! [ -e $feedbackcalfile ]
+   then 
+      printprocesslog "WARN "$feedbackcalfile" not found."
+      continue
+   else
+      feedbacknumerrors=`fverify $feedbackcalfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $feedbacknumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $feedbackcalfile fverify returned "$feedbacknumerrors" error(s)."
+      fi
+   fi
+
+   # calibrated currents: if the daily file does not exist, fall back to the
+   # scratch area and (re)create it with calibrate.C if needed
+   calcurrentsfile=$auxdir/$runnumber.FEEDBACK_CALIBRATED_CURRENTS.fits
+   if ! [ -e $calcurrentsfile ]
+   then 
+      calcurrentsfile=/gpfs/scratch/fact/calibrated_currents/$runnumber.CALIBRATED_CURRENTS.fits
+      if ! [ -e $calcurrentsfile ]
+      then 
+         printprocesslog "INFO run calibrate.C for night "$runnumber >> $logfile 2>&1
+         root -q -b -l fact/processing/calibrate.C\($runnumber\)
+      fi
+   fi
+   printprocesslog "INFO using calibrated currents from file "$calcurrentsfile
+
+   #calcurrentsfile=$auxdir/$runnumber.FEEDBACK_CALIBRATED_CURRENTS.fits
+   #calcurrentsfile=/scratch_nfs/calibrated_currents/$runnumber.CALIBRATED_CURRENTS.fits
+   #calcurrentsfile=/gpfs/scratch/fact/calibrated_currents/$runnumber.CALIBRATED_CURRENTS.fits
+   if ! [ -e $calcurrentsfile ]
+   then 
+      printprocesslog "WARN "$calcurrentsfile" not found."
+      continue
+   else
+      calnumerrors=`fverify $calcurrentsfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $calnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $calcurrentsfile fverify returned "$calnumerrors" error(s)."
+      fi
+   fi
+   
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      # read run type and the start/stop time keywords (integer + fractional
+      # part) from the FITS header of the raw file
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      # convert to the time scale used in the aux files: values > 30000 are
+      # MJD and get 40587 (MJD of 1970-01-01) subtracted, i.e. become days
+      # since the unix epoch; tstart2 is tstart minus 10 seconds
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # columns filled from the output of currents.C, in the order in which
+      # the macro prints the 10 numbers on its "result" line
+      currentscols=( fCurrentsMedMean fCurrentsMedRms fCurrentsDevMean fCurrentsDevRms fCurrentsMedMeanBeg fCurrentsMedMeanEnd fCurrentsDiffToPrediction fCurrentsRelDiffToPrediction fCurrentsLineRms fCurrentsRelLineRms )
+
+      # get information from the calibrated currents, but only if the file
+      # exists and fverify reported no errors; otherwise leave the result
+      # array empty so every column is reset to NULL below
+      if [ -e $calcurrentsfile ] && [ $calnumerrors -eq 0 ]
+      then 
+         currents=( `root -q -b -l fact/processing/currents.C\("\"$calcurrentsfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[-]?[0-9]+[.]?[0-9]*'` )
+      else
+         currents=()
+      fi
+
+      # append one assignment per column; a missing value becomes NULL
+      # (replaces ten copy-pasted if/else stanzas with one loop)
+      sep=""
+      for i in ${!currentscols[@]}
+      do
+         val=${currents[$i]}
+         if [ "$val" == "" ]
+         then
+            val="NULL"
+         fi
+         query=$query$sep${currentscols[$i]}"="$val
+         sep=", "
+      done
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      
+      #echo $query
+      # send query to DB (sendquery reads the global $query)
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillAuxData.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillAuxData.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillAuxData.sh	(revision 18288)
@@ -0,0 +1,634 @@
+#!/bin/bash
+
+# new version of the script to run on newdaq
+
+# FillAuxData.sh
+# Fills per-run pointing and source information (fSourceKey, fRightAscension,
+# fDeclination, fZenithDistance*, fAzimuth*, ...) from the nightly aux files
+# into the RunInfo table.
+# Helper functions used below (printprocesslog, getdates, sendquery, finish)
+# and the path variables are expected from Sourcefile.sh (sourced below).
+
+# option whether to fill all row or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+#   doupdate="yes" # update all entries (needed when new fields have been added)
+#   doupdate="force" # needed when something with insert in La Palma did not work (adds more information)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillAuxData-"$datetime".log"
+date >> $logfile
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+# $certaindate (environment) selects a single night in YYYY/MM/DD format
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 3 nights if hour between 7 and 19h, else only current night
+   getdates 3 7 19
+fi
+
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+# mjd of 1970-01-01
+#   needed in this script as for 1 day the mjd in the aux files are inconsistent
+# mjdref=40587
+
+# known: 
+# 2011/11/22 MJDREF in DRIVE empty, Time > 55000
+# 2011/11/23 MJDREF in DRIVE not empty, Time > 55000
+# 2011/11/24 MJDREF in DRIVE not empty, Time > 15000
+# raw files
+# 2011/11/21 no MJDREF
+# 2011/11/22 MJDREF
+# further things: https://www.fact-project.org/logbook/showthread.php?tid=67
+
+# trigger rate has as first value -1, but with using the median it should be fine
+
+
+# evaluate the statistics line printed by 'fitsdump -s' for one column
+#   $1    column name as it appears in the fitsdump output (e.g. Ra, Zd)
+#   $2... the complete statistics output
+# results are passed back via the global variables min, max, med, mean, rms
+# (emptied first, so they stay empty when the column was not found)
+function evaluatestatistics()
+{
+   # $1 variable name
+   # $@ statistics
+   if [ "$2" = "" ]
+   then
+      printprocesslog "WARN couldn't get statistics from file $1 for run "$date" "$file
+      # NOTE(review): 'continue' inside a function affects the caller's loop
+      # only as a non-POSIX bash extension (newer bash versions warn about
+      # it) — consider returning a status and skipping in the caller
+      continue
+   fi
+   min=
+   mean=
+   med=
+   max=
+   rms=
+   evaluation=`echo $@ | grep -E -o '\['${1}':0[:]?[0-9]*\]\ Min:\ [-]?[0-9]+[.]?[0-9]*\ Max:\ [-]?[0-9]+[.]?[0-9]*\ Med:\ [-]?[0-9]+[.]?[0-9]*\ Avg:\ [-]?[0-9]+[.]?[0-9]*\ Rms:\ [-]?[0-9]+[.]?[0-9]*[e]?[-]?[0-9]*'`
+   if [ "$evaluation" = "" ]
+   then
+      printprocesslog "WARN empty evaluation of statistic ("$@") for run "$date" "$file
+      #echo "   ---> "$@
+   fi
+   #echo "ev: "$evaluation
+   min=`echo $evaluation | grep -E -o 'Min:\ [-]?[0-9]+[.]?[0-9]*' | sed -e 's/Min:\ //'`
+   max=`echo $evaluation | grep -E -o 'Max:\ [-]?[0-9]+[.]?[0-9]*' | sed -e 's/Max:\ //'`
+   med=`echo $evaluation | grep -E -o 'Med:\ [-]?[0-9]+[.]?[0-9]*' | sed -e 's/Med:\ //'`
+   mean=`echo $evaluation | grep -E -o 'Avg:\ [-]?[0-9]+[.]?[0-9]*' | sed -e 's/Avg:\ //'`
+   # bugfix: the pattern used to end in [0-9]+, which demanded a digit after
+   # the (optional) exponent marker, so a plain integer like 'Rms: 5' never
+   # matched and $rms came back empty although $evaluation matched; [0-9]*
+   # accepts it and still matches scientific notation like 1.2e-05
+   rms=`echo $evaluation | grep -E -o 'Rms:\ [-]?[0-9]+[.]?[0-9]*[e]?[-]?[0-9]*' | sed -e 's/Rms:\ //'`
+   #echo "eval: "$min" "$max" "$med" "$mean" "$rms
+}
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   rawdir=$rawdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+      #echo "INFO processing files in "$auxdir
+   fi
+
+   # check if raw files are available from that night
+   #   only needed to get start/stop time
+   #   might be removed once the start/stop time comes from the DB
+   if ! [ -d $rawdir ]
+   then
+      printprocesslog "INFO no data available in "$rawdir" -> continue"
+      continue
+   fi
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   # as aux files are written only once a minute, select only files which are older than 1.5 minutes
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 AND NOT ISNULL(fRunStop) AND fRunStop < SUBTIME(UTC_TIMESTAMP(), \"00:01:30\")"
+   # only runs which are not yet filled 
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fRightAscension) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   # get daily fits files 
+   trackingfile=$auxdir/$runnumber.DRIVE_CONTROL_TRACKING_POSITION.fits
+   if ! [ -e $trackingfile ]
+   then 
+      printprocesslog "WARN "$trackingfile" not found."
+   fi
+   
+   sourceposfile=$auxdir/$runnumber.DRIVE_CONTROL_SOURCE_POSITION.fits
+   if ! [ -e $sourceposfile ]
+   then 
+      printprocesslog "WARN "$sourceposfile" not found."
+   else
+      sourceposfiletstarti=`$factpath/fitsdump -h $sourceposfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      sourceposfiletstartf=`$factpath/fitsdump -h $sourceposfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      if [ $sourceposfiletstarti -gt 30000 ]
+      then 
+         sourceposfiletstart=`echo " $sourceposfiletstarti + $sourceposfiletstartf - 40587 " | bc -l`
+      else
+         sourceposfiletstart=`echo " $sourceposfiletstarti + $sourceposfiletstartf " | bc -l`
+      fi
+   fi
+   
+   triggerratefile=$auxdir/$runnumber.FTM_CONTROL_TRIGGER_RATES.fits
+   if ! [ -e $triggerratefile ]
+   then 
+      printprocesslog "WARN "$triggerratefile" not found."
+   fi
+   
+   thresholdfile=$auxdir/$runnumber.FTM_CONTROL_STATIC_DATA.fits
+   if ! [ -e $thresholdfile ]
+   then 
+      printprocesslog "WARN "$thresholdfile" not found."
+   fi
+   
+   biasvoltagefile=$auxdir/$runnumber.BIAS_CONTROL_VOLTAGE.fits
+   if ! [ -e $biasvoltagefile ]
+   then 
+      printprocesslog "WARN "$biasvoltagefile" not found."
+   fi
+
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum`
+ 
+      # get information from rawfile
+      rawfile=`ls $rawdir/$runnumber"_"\`printf %03d $filenum\`.fits*`
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+
+      #runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      #mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      echo $runtype
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+      #echo $tstart
+      #echo $tstop
+      #if [ $runnumber -eq 20111123 ] 
+      #then 
+      #   # add mjdref for days were aux files were inconsistent
+      #   tstart=`echo " $tstart + $mjdref " | bc -l`
+      #   tstart2=`echo " $tstart2 + $mjdref " | bc -l`
+      #   tstop=`echo " $tstop + $mjdref " | bc -l`
+      #fi
+
+      # get information from source_pos file
+      # determine the source tracked during this run and look up its key in
+      # the DB; reset $sourcekey first so that a failed lookup cannot silently
+      # reuse the key of the previous file (it is written to fSourceKey later)
+      sourcekey=
+      if [ -e $sourceposfile ] 
+      then 
+         sourcename=`$factpath/fitsdump ${sourceposfile} -c Time -c Name --filter='[1]<'${tstop}' && [1]>'${sourceposfiletstart}  2>/dev/null | tail -1 2>&1 | grep -o -E "['][a-zA-Z0-9\ \.\+\-]+[']" | sed -e "s/'//g"`
+         if [ "$sourcename" == "" ]
+         then
+            printprocesslog "INFO couldn't get sourcename ("$sourcename") from "$sourceposfile" for "$runnumber"_"$filenum
+         else 
+            query="SELECT fSourceKey FROM Source WHERE fSourceName='"$sourcename"'"
+            sourcekey=`sendquery`
+            # bugfix: this tested $sourcename (known to be non-empty in this
+            # branch) instead of the query result $sourcekey, so a failed DB
+            # lookup was never reported
+            if [ "$sourcekey" == "" ]
+            then
+               printprocesslog "WARN couldn't get sourcekey for source "$sourcename" from DB for "$runnumber"_"$filenum
+            fi 
+         fi
+      fi
+      if [ "$doupdate" == "force" ]
+      then
+         # set runtype to 'unknown', if no runtype could be retrieved from file
+         if [ "$runtype" == "" ]
+         then
+            runtype="n/a"
+         fi
+         # on 15.11.2011 the runtypes had different names
+         if [ "$date" == "2011/11/15" ]
+         then
+            if [ "$runtype" == "drs-calib" ]
+            then
+               runtype="drs-gain"
+            fi
+            if [ "$runtype" == "drs-time-calib" ]
+            then
+               runtype="drs-time"
+            fi
+            if [ "$runtype" == "pedestal" ]
+            then
+               runtype="drs-pedestal"
+            fi
+            if [ "$runtype" == "light-pulser" ]
+            then
+               runtype="light-pulser-ext"
+            fi
+            if [ "$runtype" == "pedestal-on" ]
+            then
+               runtype="pedestal"
+            fi
+         fi
+         # get runtype
+         query="SELECT fRunTypeKEY FROM RunType WHERE fRunTypeName='"$runtype"'"
+         result2=( `sendquery` )
+         if [ ${#result2} -eq 0 ]
+         then 
+            printprocesslog "ERROR "$numberfromname": Could not query fRunTypeKey for runtype "$runtype" ."
+            continue
+         fi
+         # in newest data start time is in DATE-OBS
+         # in older data start time is in TSTART
+         # in the beginning TSTART was empty
+         #runstart=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep DATE-OBS | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         runstart=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep DATE-OBS | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         #runstart2=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep TSTART | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         runstart2=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep TSTART | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         if [ "$runstart" == ""  ]
+         then
+            if [ "$runstart2" == ""  ]
+            then
+               #runstart=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep DATE | grep -v 'DATE-' | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+               runstart=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep DATE | grep -v 'DATE-' | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+            else
+               runstart=$runstart2
+            fi
+         fi
+         # in newest data start time is in DATE-END
+         # in older data start time is in TSTOP
+         # in the beginning TSTOP was empty
+         #runstop=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep DATE-END | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         runstop=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep DATE-END | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         #runstop2=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep TSTOP | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         runstop2=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep TSTOP | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9]T[0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{6}'`
+         if [ "$runstop" == ""  ]
+         then
+            if [ "$runstop2" == ""  ]
+            then
+               runstop=`stat $rawfile  2>/dev/null | grep Modify | grep -E -o '20[1-9][0-9][\-][01][0-9][\-][0-3][0-9][ ][0-2][0-9]:[0-6][0-9]:[0-6][0-9][.][0-9]{9}'`
+            else
+               runstop=$runstop2
+            fi
+         fi
+         echo $runstart $runstop
+         #numevents=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep Events | grep -E -o '[0-9]+'`
+         numevents=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep Events | grep -E -o '[0-9]+'`
+         #roi=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep NROI | grep -v NROITM | grep -E -o '[0-9]{1,4}'`
+         roi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep NROI | grep -v NROITM | grep -E -o '[0-9]{1,4}'`
+         #roitm=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep NROITM | grep -E -o '[0-9]{1,4}'`
+         roitm=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep NROITM | grep -E -o '[0-9]{1,4}'`
+         #numphys=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRG ' | grep -E -o '[0-9]+'`
+         numphys=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRG ' | grep -E -o '[0-9]+'`
+         #numext1=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRGEXT1' | grep -E -o '[ ][0-9]+[ ]' | sed -e 's/\ //g'`
+         numext1=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRGEXT1' | grep -E -o '[ ][0-9]+[ ]' | sed -e 's/\ //g'`
+         #numext2=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRGEXT2' | grep -E -o '[ ][0-9]+[ ]' | sed -e 's/\ //g'`
+         numext2=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRGEXT2' | grep -E -o '[ ][0-9]+[ ]' | sed -e 's/\ //g'`
+         #numelp=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRGLPE' | grep -E -o '[0-9]+'`
+         numelp=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRGLPE' | grep -E -o '[0-9]+'`
+         #numilp=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRGLPI' | grep -E -o '[0-9]+'`
+         numilp=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRGLPI' | grep -E -o '[0-9]+'`
+         #numoth=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRGMISC' | grep -E -o '[0-9]+'`
+         numoth=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRGMISC' | grep -E -o '[0-9]+'`
+         #numped=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRGPED' | grep -E -o '[0-9]+'`
+         numped=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRGPED' | grep -E -o '[0-9]+'`
+         #numtime=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'NTRGTIM' | grep -E -o '[0-9]+'`
+         numtime=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'NTRGTIM' | grep -E -o '[0-9]+'`
+         #compiled=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'COMPILED' | grep -E -o "['][a-zA-Z]+[ ][ 12][0-9][ ]20[0-9][0-9][ ][0-2][0-9]:[0-5][0-9]:[0-5][0-9][']" | sed -e "s/'//g"`
+         compiled=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'COMPILED' | grep -E -o "['][a-zA-Z]+[ ][ 12][0-9][ ]20[0-9][0-9][ ][0-2][0-9]:[0-5][0-9]:[0-5][0-9][']" | sed -e "s/'//g"`
+         if ! [ "$compiled" == "" ]
+         then 
+            compiletime=`date +'%F %H:%M:%S' --date="${compiled}" `
+         else
+            compiletime=
+         fi
+         #revnum=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep 'REVISION' | grep -E -o "['][0-9]+[:]?[0-9]*[MSP]*[']" | sed -e "s/'//g"`
+         revnum=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'REVISION' | grep -E -o "['][0-9]+[:]?[0-9]*[MSP]*[']" | sed -e "s/'//g"`
+         # get checksums from header
+         #checksum=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep CHECKSUM | grep -E -o '[a-zA-Z0-9]{16}'`
+         checksum=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep CHECKSUM | grep -E -o '[a-zA-Z0-9]{16}'`
+         if [ "$checksum" == "" ]
+         then 
+            printprocesslog "WARN checksum for file "$rawfile" is empty."
+         fi
+         #datasum=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep DATASUM | grep -E -o '[0-9]{1,10}'`
+         datasum=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep DATASUM | grep -E -o '[0-9]{1,10}'`
+         if [ "$datasum" == "" ]
+         then 
+            printprocesslog "WARN datasum for file "$rawfile" is empty."
+         fi
+         # check if this run has drs file
+         #   in case file is available, get STEP from header
+         # in the very beginning only drs-files were existing
+         # in the beginning the keywords DRSCALIB and STEP were not existing
+         drsfile=`echo $rawfile | sed -e 's/fits/drs.fits/'`
+         numdrsfiles=`ls $drsfile 2>/dev/null | wc -l`
+         #drscalib=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep DRSCALIB | grep -E -o "['][TF][']" | sed -e "s/'//g"`
+         drscalib=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep DRSCALIB | grep -E -o "[\ ][TF][\ ]" | sed -e "s/\ //g"`
+         if [ "$drscalib" == "T" ]
+         then 
+            #step=`$factpath/fitsdump -h -t Events $rawfile  2>/dev/null | grep DRSSTEP | grep -E -o "['][012][']" | sed -e "s/'//g"`
+            step=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep DRSSTEP | grep -E -o "[\ ][012][\ ]" | sed -e "s/\ //g"`
+            #stepfromdrs=`$factpath/fitsdump -h -t Events $drsfile  2>/dev/null | grep STEP | grep -E -o "['][012][']" | sed -e "s/'//g"`
+            stepfromdrs=`$factpath/fitsdump -h $drsfile  2>/dev/null | grep STEP | grep -E -o "[\ ][012][\ ]?" | sed -e "s/\ //g"`
+            if [ "$stepfromdrs" != "$step" ]
+            then
+               printprocesslog "ERROR for file "$rawfile" step from drsfile ("$stepfromdrs") and from file ("$step") do not agree."
+               # fall back to the step from the drs file if the raw file has
+               # none (bugfix: this used the undefined variable
+               # $stepfromdrsfile, so the fallback could never trigger and
+               # would have assigned an empty value)
+               if [ "$stepfromdrs" != "" ] && [ "$step" == "" ]
+               then 
+                  step=$stepfromdrs
+                  printprocesslog "WARN setting drsstep from drsfile ("$stepfromdrs") although value differs from the one in file "$rawfile"."
+               fi
+            fi
+            if ! [ $numdrsfiles -eq 1 ]
+            then 
+               printprocesslog "ERROR for file "$rawfile" number of drsfiles ("$numdrsfiles") and information from header ("$drscalib") don't agree."
+            fi
+            if [ "$step" = "" ]
+            then 
+               printprocesslog "ERROR file "$rawfile" has drsfiles ("$numdrsfiles"), but step ("$step") is empty."
+            fi
+         else
+            if ! [ "$drscalib" == "F" ]
+            then
+               printprocesslog "WARN for file "$rawfile" DRSCALIB is neither T nor F."
+            fi
+         fi
+      fi
+      
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+      
+      # fill source key only if available
+      if ! [ "$sourcekey" = "" ]
+      then
+         query=$query" fSourceKey="$sourcekey", "
+      else
+         query=$query" fSourceKey=NULL, "
+      fi
+      
+      # get information from tracking
+      if [ -e $trackingfile ] 
+      then 
+         # get statistics
+         trackingstats=`$factpath/fitsdump $trackingfile -s -c Time -c Ra -c Dec -c Zd -c Az --filter='[1]<'${tstop}' && [1]>'${tstart}  2>/dev/null`
+         # RA
+         evaluatestatistics "Ra" $trackingstats
+         #echo $min"_"$max"_"$
+         if [ "$evaluation" != "" ]
+         then
+            if [ "$min" == "$max" ] 
+            then
+               query=$query" fRightAscension="$mean
+            else
+               query=$query" fRightAscension=NULL"
+               printprocesslog "WARN for $rawfile RA changes within run (min: "$min", max: "$max")."
+            fi
+            # Declination
+            evaluatestatistics "Dec" $trackingstats
+            if [ "$decmin" == "$decmax" ]
+            then
+               query=$query", fDeclination="$mean
+            else
+               query=$query", fDeclination=NULL"
+               printprocesslog "WARN for $rawfile declination changes within run (min: "$min", max: "$max")."
+            fi
+         else
+            query=$query" fRightAscension=NULL"
+            query=$query", fDeclination=NULL"
+         fi
+         # Zd
+         evaluatestatistics "Zd" $trackingstats
+         if [ "$evaluation" != "" ]
+         then
+            query=$query", fZenithDistanceMin="$min
+            query=$query", fZenithDistanceMean="$mean
+            query=$query", fZenithDistanceMax="$max
+         else
+            query=$query", fZenithDistanceMin=NULL"
+            query=$query", fZenithDistanceMean=NULL"
+            query=$query", fZenithDistanceMax=NULL"
+         fi
+         # Az
+         evaluatestatistics "Az" $trackingstats
+         if [ "$evaluation" != "" ]
+         then
+            query=$query", fAzimuthMin="$min
+            query=$query", fAzimuthMean="$mean
+            query=$query", fAzimuthMax="$max
+         else
+            query=$query", fAzimuthMin=NULL"
+            query=$query", fAzimuthMean=NULL"
+            query=$query", fAzimuthMax=NULL"
+         fi
+      else
+         query=$query" fRightAscension=NULL"
+         query=$query", fDeclination=NULL"
+         query=$query", fZenithDistanceMin=NULL"
+         query=$query", fZenithDistanceMean=NULL"
+         query=$query", fZenithDistanceMax=NULL"
+         query=$query", fAzimuthMin=NULL"
+         query=$query", fAzimuthMean=NULL"
+         query=$query", fAzimuthMax=NULL"
+      fi
+   
+      # get information from trigger
+      if [ -e $triggerratefile ] 
+      then 
+         # get statistics
+         triggerstats=`$factpath/fitsdump $triggerratefile -s -c Time -c TriggerRate --filter='[1]<'${tstop}' && [1]>'${tstart}  2>/dev/null`
+         evaluatestatistics "TriggerRate" $triggerstats
+         if [ "$evaluation" != "" ]
+         then
+            query=$query", fTriggerRateMedian="$med
+         else
+            query=$query", fTriggerRateMedian=NULL"
+         fi
+      else
+         query=$query", fTriggerRateMedian=NULL"
+      fi
+      
+      # get information from thresholds
+      if [ -e $thresholdfile ] 
+      then 
+         # get statistics
+         thresholdstats=`$factpath/fitsdump $thresholdfile -s -c Time -c PatchThresh --filter='[1]<'${tstop}' && [1]>'${tstart}  2>/dev/null`
+         evaluatestatistics "PatchThresh" $thresholdstats
+         if [ "$evaluation" = "" ]
+         then
+            thresholdstats=`$factpath/fitsdump $thresholdfile -s -c Time -c PatchThresh --filter='[1]<'${tstop}' && [1]>'${tstart2}  2>/dev/null`
+            #echo "$factpath/fitsdump $thresholdfile -s -c Time -c PatchThresh --filter='[1]<'${tstop}' && [1]>'${tstart2}  2>/dev/null"
+            evaluatestatistics "PatchThresh" $thresholdstats
+         fi
+         if [ "$evaluation" != "" ]
+         then
+            query=$query", fThresholdMedian="$med
+         else
+            query=$query", fThresholdMedian=NULL"
+         fi
+      else
+         query=$query", fThresholdMedian=NULL"
+      fi
+
+      # get information from bias: U
+      if [ -e $biasvoltagefile ] 
+      then 
+         if [ $runnumber -gt 20120324 ]
+         then
+            biasstats=`$factpath/fitsdump $biasvoltagefile -s -c Time -c Uout --filter='[1]<'${tstop}' && [1]>'${tstart}  2>/dev/null`
+            evaluatestatistics "Uout" $biasstats
+            if [ "$evaluation" = "" ]
+            then
+               biasstats=`$factpath/fitsdump $biasvoltagefile -s -c Time -c Uout --filter='[1]<'${tstop}' && [1]>'${tstart2}  2>/dev/null`
+               evaluatestatistics "Uout" $biasstats
+            fi
+         else
+            biasstats=`$factpath/fitsdump $biasvoltagefile -s -c Time -c U --filter='[1]<'${tstop}' && [1]>'${tstart}  2>/dev/null`
+            evaluatestatistics "U" $biasstats
+            if [ "$evaluation" = "" ]
+            then
+               biasstats=`$factpath/fitsdump $biasvoltagefile -s -c Time -c U --filter='[1]<'${tstop}' && [1]>'${tstart2}  2>/dev/null`
+               evaluatestatistics "U" $biasstats
+            fi
+         fi
+         if [ "$evaluation" != "" ]
+         then
+            query=$query", fBiasVoltageMedian="$med
+         else
+            query=$query", fBiasVoltageMedian=NULL"
+         fi
+      else
+         query=$query", fBiasVoltageMedian=NULL"
+      fi
+      if [ "$doupdate" == "force" ]
+      then
+         query=$query", fRunStart='"$runstart"', fRunStop='"$runstop"'"
+         query=$query", fRunTypeKey="${result2[0]}
+         if [ "$numevents" != "" ]
+         then
+            query=$query", fNumEvents="$numevents
+         fi
+         if [ "$roi" != "" ]
+         then
+            query=$query", fROI="$roi
+         fi
+         if [ "$roitm" != "" ]
+         then
+            query=$query", fROITimeMarker="$roitm
+         fi
+         if [ "$numphys" != "" ]
+         then
+            query=$query", fNumPhysicsTrigger="$numphys
+         fi
+         if [ "$numext1" != "" ]
+         then
+            query=$query", fNumExt1Trigger="$numext1
+         fi
+         if [ "$numext2" != "" ]
+         then
+            query=$query", fNumExt2Trigger="$numext2
+         fi
+         if [ "$numelp" != "" ]
+         then
+            query=$query", fNumELPTrigger="$numelp
+         fi
+         if [ "$numilp" != "" ]
+         then
+            query=$query", fNumILPTrigger="$numilp
+         fi
+         if [ "$numped" != "" ]
+         then
+            query=$query", fNumPedestalTrigger="$numped
+         fi
+         if [ "$numtime" != "" ]
+         then
+            query=$query", fNumTimeTrigger="$numtime
+         fi
+         if [ "$numoth" != "" ]
+         then
+            query=$query", fNumOtherTrigger="$numoth
+         fi
+         if [ "$checksum" != "" ]
+         then
+            query=$query", fCheckSum='"$checksum"'"
+         fi
+         if [ "$datasum" != "" ]
+         then
+            query=$query", fDataSum='"$datasum"'"
+         fi
+         if [ "$numdrsfiles" != "" ]
+         then
+            query=$query", fHasDrsFile="$numdrsfiles
+         fi
+         if [ "$step" != "" ]
+         then
+            query=$query", fDrsStep="$step
+         fi
+         if [ "$compiletime" != "" ]
+         then
+            query=$query", fCompileTime='"$compiletime"'"
+         fi
+         if [ "$revnum" != "" ]
+         then
+            query=$query", fRevisionNumber='"$revnum"'"
+         fi
+      fi
+
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      echo $query
+
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillAuxTemp.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillAuxTemp.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillAuxTemp.sh	(revision 18288)
@@ -0,0 +1,223 @@
+#!/bin/bash
+
+# option whether to fill all rows or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillAuxTemp-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+   if [ $runnumber -lt 20120328 ]
+   then
+      continue
+   fi
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fCameraTempMean) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   magicweatherfile=$auxdir/$runnumber.MAGIC_WEATHER_DATA.fits
+   if ! [ -e $magicweatherfile ]
+   then 
+      printprocesslog "WARN "$magicweatherfile" not found."
+      #echo "WARN "$magicweatherfile" not found."
+   else
+      weathernumerrors=`fverify $magicweatherfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $weathernumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $magicweatherfile fverify returned "$weathernumerrors" error(s)."
+      fi
+   fi
+
+   fsctempfile=$auxdir/$runnumber.FSC_CONTROL_TEMPERATURE.fits
+   if ! [ -e $fsctempfile ]
+   then 
+      printprocesslog "WARN "$fsctempfile" not found."
+      #echo "WARN "$fsctempfile" not found."
+   else
+      tempnumerrors=`fverify $fsctempfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $tempnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $fsctempfile fverify returned "$tempnumerrors" error(s)."
+      fi
+   fi
+
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      #checkfitsfile=`fverify $rawfile  2>/dev/null | grep '0 error(s)'`
+      #if [ "$checkfitsfile" == "" ]
+      #then
+      #   numfitserrors=1
+      #   printprocesslog "WARN: "$rawfile" probably corrupted."
+      #   continue
+      #fi
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+      #echo $rawfile" "$tstart" "$tstop
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # get information from fsc: T[31]
+      if [ -e $fsctempfile ] && [ $tempnumerrors -eq 0 ]
+      then 
+         fsctemps=( `root -q -b -l fact/processing/camtemp.C\("\"$fsctempfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         if [ "${fsctemps[0]}" == "" ]
+         then 
+            query=$query"fCameraTempMean=NULL"
+         else
+            query=$query"fCameraTempMean="${fsctemps[0]}
+         fi
+         if [ "${fsctemps[1]}" == "" ]
+         then 
+            query=$query", fCameraTempRms=NULL"
+         else
+            query=$query", fCameraTempRms="${fsctemps[1]}
+         fi
+         if [ "${fsctemps[2]}" == "" ]
+         then 
+            query=$query", fCameraTempRmsMean=NULL"
+         else
+            query=$query", fCameraTempRmsMean="${fsctemps[2]}
+         fi
+      else
+         query=$query" fCameraTempMean=NULL"
+         query=$query", fCameraTempRms=NULL"
+         query=$query", fCameraTempRmsMean=NULL"
+      fi
+      #fCameraTempMeanRms: mean of rms of single sensors
+      
+      # get information from weather: T
+      if [ -e $magicweatherfile ] && [ $weathernumerrors -eq 0 ]
+      then 
+         mtemps=( `root -q -b -l fact/processing/magictemp.C\("\"$magicweatherfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         if [ "${mtemps[0]}" == "" ]
+         then 
+            query=$query", fOutsideTempMean=NULL"
+         else
+            query=$query", fOutsideTempMean="${mtemps[0]}
+         fi
+         if [ "${mtemps[1]}" == "" ]
+         then 
+            query=$query", fOutsideTempRms=NULL"
+         else
+            query=$query", fOutsideTempRms="${mtemps[1]}
+         fi
+      else
+         query=$query", fOutsideTempMean=NULL"
+         query=$query", fOutsideTempRms=NULL"
+      fi
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillAuxThresholds.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillAuxThresholds.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillAuxThresholds.sh	(revision 18288)
@@ -0,0 +1,228 @@
+#!/bin/bash
+
+# option whether to fill all rows or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillThresholds-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+#echo ${dates[@]}
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fThresholdMedMean) AND ISNULL(fThresholdMinSet) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+   
+   thresholdfile=$auxdir/$runnumber.RATE_CONTROL_THRESHOLD.fits
+   printprocesslog "INFO processing "$thresholdfile
+   echo "INFO processing "$thresholdfile >> $logfile 2>&1
+   if ! [ -e $thresholdfile ]
+   then 
+      printprocesslog "WARN "$thresholdfile" not found."
+      continue
+   else
+      threshnumerrors=`fverify $thresholdfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $threshnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $thresholdfile fverify returned "$threshnumerrors" error(s)."
+      fi
+   fi
+
+   thresholdfile2=$auxdir/$runnumber.FTM_CONTROL_STATIC_DATA.fits
+   #ls $thresholdfile2
+   if ! [ -e $thresholdfile2 ]
+   then 
+      printprocesslog "WARN "$thresholdfile2" not found."
+      continue
+   else
+      threshnumerrors2=`fverify $thresholdfile2 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $threshnumerrors2 -gt 0 ]
+      then 
+         printprocesslog "WARN for $thresholdfile2 fverify returned "$threshnumerrors2" error(s)."
+      fi
+   fi
+
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # get information 
+      if [ -e $thresholdfile2 ] && [ $threshnumerrors2 -eq 0 ]
+      then 
+         thresholds1=( `root -q -b -l fact/processing/threshold.C\("\"$thresholdfile2\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         #root -q -b -l fact/threshold.C\("\"$thresholdfile2\""\,$tstart\,$tstop\) | grep "result"
+         #echo ${thresholds1[@]}
+         if [ "${thresholds1[0]}" == "" ]
+         then 
+            query=$query"fThresholdMedMean=NULL"
+         else
+            query=$query"fThresholdMedMean="${thresholds1[0]}
+         fi
+         if [ "${thresholds1[1]}" == "" ]
+         then 
+            query=$query", fThresholdMedRms=NULL"
+         else
+            query=$query", fThresholdMedRms="${thresholds1[1]}
+         fi
+         if [ "${thresholds1[2]}" == "" ]
+         then 
+            query=$query", fThresholdMax=NULL"
+         else
+            query=$query", fThresholdMax="${thresholds1[2]}
+         fi
+         if [ "${thresholds1[3]}" == "" ]
+         then 
+            query=$query", fThresholdAvgMean=NULL"
+         else
+            query=$query", fThresholdAvgMean="${thresholds1[3]}
+         fi
+      else
+         query=$query" fThresholdMedMean=NULL"
+         query=$query", fThresholdMedRms=NULL"
+         query=$query", fThresholdMax=NULL"
+         query=$query", fThresholdAvgMean=NULL"
+      fi
+      
+      # get information 
+      if [ -e $thresholdfile ] && [ $threshnumerrors -eq 0 ]
+      then 
+         thresholds2=( `root -q -b -l fact/processing/lastth.C\("\"$thresholdfile\""\,$tstart\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         #root -q -b -l fact/lastth.C\("\"$thresholdfile\""\,$tstart\) | grep "result"
+         #echo ${thresholds2[@]}
+         if [ "${thresholds2[0]}" == "" ]
+         then 
+            query=$query", fThresholdMinSet=NULL"
+         else
+            query=$query", fThresholdMinSet="${thresholds2[0]}
+         fi
+         if [ "${thresholds2[1]}" == "" ]
+         then 
+            query=$query", fThresholdMinTimeDiff=NULL"
+         else
+            query=$query", fThresholdMinTimeDiff="${thresholds2[1]}
+         fi
+      else
+         query=$query", fThresholdMinSet=NULL"
+         query=$query", fThresholdMinTimeDiff=NULL"
+      fi
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      
+      #echo $query
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillDrsTemp.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillDrsTemp.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillDrsTemp.sh	(revision 18288)
@@ -0,0 +1,200 @@
+#!/bin/bash
+
+# option whether to fill all rows or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/FillDrsTemp-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+#echo ${dates[@]}
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+   if [ $runnumber -lt 20120328 ]
+   then
+      continue
+   fi
+   #echo $auxdir" @ "`date` 
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fDrsTempMinMean) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   if [ ${#filenumbers} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   drstempfile=$auxdir/$runnumber.FAD_CONTROL_TEMPERATURE.fits
+   if ! [ -e $drstempfile ]
+   then 
+      printprocesslog "WARN "$drstempfile" not found."
+      #echo "WARN "$drstempfile" not found."
+   else
+      tempnumerrors=`fverify $drstempfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $tempnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for "$drstempfile" fverify returned "$tempnumerrors" error(s)."
+      fi
+   fi
+
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      #checkfitsfile=`fverify $rawfile  2>/dev/null | grep '0 error(s)'`
+      #if [ "$checkfitsfile" == "" ]
+      #then
+      #   numfitserrors=1
+      #   printprocesslog "WARN: "$rawfile" probably corrupted."
+      #   continue
+      #fi
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # get information from fsc: T[31]
+      if [ -e $drstempfile ] && [ $tempnumerrors -eq 0 ]
+      then 
+         drstemps=( `root -q -b -l fact/processing/drstemp.C\("\"$drstempfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         if [ "${drstemps[0]}" == "" ]
+         then 
+            query=$query"fDrsTempMinMean=NULL"
+         else
+            query=$query"fDrsTempMinMean="${drstemps[0]}
+         fi
+         if [ "${drstemps[1]}" == "" ]
+         then 
+            query=$query", fDrsTempMaxMean=NULL"
+         else
+            query=$query", fDrsTempMaxMean="${drstemps[1]}
+         fi
+         if [ "${drstemps[2]}" == "" ]
+         then 
+            query=$query", fDrsTempMinRmsMean=NULL"
+         else
+            query=$query", fDrsTempMinRmsMean="${drstemps[2]}
+         fi
+         if [ "${drstemps[3]}" == "" ]
+         then 
+            query=$query", fDrsTempMaxRmsMean=NULL"
+         else
+            query=$query", fDrsTempMaxRmsMean="${drstemps[3]}
+         fi
+      else
+         query=$query" fDrsTempMinMean=NULL"
+         query=$query", fDrsTempMaxMean=NULL"
+         query=$query", fDrsTempMinRmsMean=NULL"
+         query=$query", fDrsTempMaxRmsMean=NULL"
+      fi
+      #fCameraTempMeanRms: mean of rms of single sensors
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      
+      #echo $query
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillEffectiveOn.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillEffectiveOn.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillEffectiveOn.sh	(revision 18288)
@@ -0,0 +1,215 @@
+#!/bin/bash
+
+# option whether to fill all row or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+logfile=$runlogpath"/EffectiveOn-"$datetime".log"
+date >> $logfile
+
+# setup to use ftools
+source $HEADAS/headas-init.sh
+
+# check if software is available
+if ! ls $factpath/fitsdump >/dev/null 2>&1
+then 
+   printprocesslog "ERROR "$factpath"/fitsdump is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+#   echo $auxdir" @ "`date` 
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND fFitsFileErrors=0 "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fEffectiveOn) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   filenumbers=( `sendquery $query` )
+   # proceed only if there are files available
+   if [ ${#filenumbers[@]} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+   
+   ftmcontrolfile=$auxdir/$runnumber.FTM_CONTROL_TRIGGER_RATES.fits
+   if ! [ -e $ftmcontrolfile ]
+   then 
+      printprocesslog "WARN "$ftmcontrolfile" not found."
+      continue
+   else
+      ftmnumerrors=`fverify $ftmcontrolfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+      if [ $ftmnumerrors -gt 0 ]
+      then 
+         printprocesslog "WARN for $ftmcontrolfile fverify returned "$ftmnumerrors" error(s)."
+      fi
+   fi
+
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      echo  `date`": processing file number "$runnumber"_"`printf %03d $filenum` >> $logfile 2>&1
+      # get information from rawfile
+      rawfile=$ziprawdata/$date/$runnumber"_"`printf %03d $filenum`.fits.fz
+      if ! [ -e $rawfile ]
+      then 
+         printprocesslog "ERROR: "$rawfile" not found."
+         continue
+      fi
+      runtype=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z-]+[']" | sed -e "s/'//g"`
+      mjdrefraw=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'MJDREF' | grep -E -o '[0-9]{5}'`
+      tstarti=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTI' | grep -E -o '[0-9]{5}'`
+      tstartf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTARTF' | grep -E -o '0[.][0-9]+'`
+      tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPI' | grep -E -o '[0-9]{5}'`
+      tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep 'TSTOPF' | grep -E -o '0[.][0-9]+'`
+      if [ "$tstarti" == "" ] || [ "$tstopi" == "" ] || [ "$tstartf" == "" ] || [ "$tstopf" == "" ]
+      then 
+         printprocesslog "WARN: "$rawfile": one of the following keywords is empty or 0: TSTARTI TSTARTF TSTOPI TSTOPF "
+         continue
+      fi
+      # assuming that at least TSTARTI and TSTOPI are consistent
+      #echo $rawfile
+      #echo $tstarti
+      #echo $tstopi
+      #echo $tstartf
+      #echo $tstopf
+      if [ $tstarti -gt 30000 ]
+      then 
+         tstart=`echo " $tstarti + $tstartf - 40587 " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 40587 - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 40587 - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf - 40587 " | bc -l`
+      else
+         tstart=`echo " $tstarti + $tstartf " | bc -l`
+         tstart2=`echo " $tstarti + $tstartf - 0.00011574 " | bc -l`  # 10 sec
+         #tstart2=`echo " $tstarti + $tstartf - 0.000023148 " | bc -l` # 2 sec
+         tstop=`echo " $tstopi + $tstopf " | bc -l`
+      fi
+
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET "
+
+      # get information from fsc: T[31]
+      if [ -e $ftmcontrolfile ] && [ $ftmnumerrors -eq 0 ]
+      then 
+         effectiveon=( `root -q -b -l fact/processing/ontime.C\("\"$ftmcontrolfile\""\,$tstart\,$tstop\) | grep "result" | grep -E -o '[0-9]+[.]?[0-9]*'` )
+         if [ "${effectiveon[0]}" == "" ]
+         then 
+            query=$query"fEffectiveOn=NULL"
+         else
+            query=$query"fEffectiveOn="${effectiveon[0]}
+         fi
+         if [ "${effectiveon[1]}" == "" ]
+         then 
+            query=$query", fEffectiveOnRms=NULL"
+         else
+            query=$query", fEffectiveOnRms="${effectiveon[1]}
+         fi
+         if [ "${effectiveon[2]}" == "" ]
+         then
+            query=$query", fOnTime=NULL"
+         else
+            query=$query", fOnTime="${effectiveon[2]}
+         fi
+         if [ "${effectiveon[3]}" == "" ]
+         then
+            query=$query", fTriggerRateTimeOver100=NULL"
+         else
+            query=$query", fTriggerRateTimeOver100="${effectiveon[3]}
+         fi
+         if [ "${effectiveon[4]}" == "" ]
+         then
+            query=$query", fTriggerRateTimeOver125=NULL"
+         else
+            query=$query", fTriggerRateTimeOver125="${effectiveon[4]}
+         fi
+         if [ "${effectiveon[5]}" == "" ]
+         then
+            query=$query", fTriggerRateTimeOver150=NULL"
+         else
+            query=$query", fTriggerRateTimeOver150="${effectiveon[5]}
+         fi
+         if [ "${effectiveon[6]}" == "" ]
+         then
+            query=$query", fTriggerRateTimeOver175=NULL"
+         else
+            query=$query", fTriggerRateTimeOver175="${effectiveon[6]}
+         fi
+         if [ "${effectiveon[7]}" == "" ]
+         then
+            query=$query", fTriggerRateRms=NULL"
+         else
+            query=$query", fTriggerRateRms="${effectiveon[7]}
+         fi
+      else
+         query=$query" fEffectiveOn=NULL"
+         query=$query", fEffectiveOnRms=NULL"
+         query=$query", fOnTime=NULL"
+         query=$query", fTriggerRateTimeOver100=NULL"
+         query=$query", fTriggerRateTimeOver125=NULL"
+         query=$query", fTriggerRateTimeOver150=NULL"
+         query=$query", fTriggerRateTimeOver175=NULL"
+         query=$query", fTriggerRateRms=NULL"
+
+      fi
+      
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+
+      #send query to DB
+      sendquery >/dev/null
+
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillFileSizes.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillFileSizes.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillFileSizes.sh	(revision 18288)
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+query="SELECT Concat(fNight, '_', LPAD(fRunId, 3, 0)) from RunInfo WHERE fFitsFileErrors=0 AND ISNULL(fFileSize)"
+runs=( `sendquery` )
+
+printprocesslog ${#runs[@]}" runs to fill."
+
+for run in ${runs[@]}
+do
+  year=`echo $run | cut -c 1-4`
+  month=`echo $run | cut -c 5-6`
+  day=`echo $run | cut -c 7-8`
+  file=$rawdata"/"$year"/"$month"/"$day"/"$run".fits.fz"
+  if ! [ -e $file ]
+  then
+     #echo $file" does not exist."
+     printprocesslog "DEBUG "$file" does not exist."
+     continue
+  fi
+  night=`echo $run | cut -d_ -f1`
+  runid=`echo $run | cut -d_ -f2`
+  size=`stat -c '%s' $file`
+  query="UPDATE RunInfo SET fFileSize="$size" WHERE fNight="$night" AND fRunID="$runid
+  printprocesslog "INFO fill size ("$size") for file "$file
+  sendquery >/dev/null
+done
+
Index: branches/trigger_burst_research/Processing/FillMoonInfo.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillMoonInfo.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillMoonInfo.sh	(revision 18288)
@@ -0,0 +1,104 @@
+#!/bin/bash
+
+# script to fill moon information to DB
+# doesn't need raw files
+# probably doesn't run at ISDC (FACT++/moon missing)
+
+# option whether to fill all row or only those where information is missing
+# $doupdate might be given as environment variable
+if [ "$doupdate" = "" ]
+then
+   doupdate="yes" # update all entries (needed when new fields have been added)
+   doupdate="no" # fill only entries which are not yet existing (default)
+fi
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 with option doupdate="$doupdate
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   runnumber=`echo $date | sed -e 's/\///g'`
+   if [ $runnumber -lt 20111115 ]
+   then
+      continue
+   fi
+   printprocesslog "INFO processing date "$date
+   #echo "INFO processing date "$date
+
+   # get file numbers from DB
+   #   but only for not-corrupted files
+   query="SELECT fRunID from RunInfo WHERE fNight="$runnumber" AND NOT ISNULL(fRunStart) "
+   if [ "$doupdate" = "no" ]
+   then
+      query=$query" AND ISNULL(fMoonDisk) "
+   fi
+   printprocesslog "DEBUG get filenumbers from DB: QUERY: "$query
+   # proceed only if there are files available
+   filenumbers=( `sendquery` )
+   if [ ${#filenumbers[@]} -eq 0 ]
+   then
+      printprocesslog "INFO No files found in the DB for night "$date
+      continue
+   fi
+
+   # fill auxiliary information for files
+   for filenum in ${filenumbers[@]}
+   do
+      printprocesslog "INFO processing file number "$runnumber"_"`printf %03d $filenum`
+      
+      # get input info from DB 
+      #  query 999 in case value is empty to easily recognize this case
+      query="SELECT if (isnull(fRightAscension), 999, fRightAscension), "
+      query=$query" if (isnull(fDeclination), 999, fDeclination), "
+      query=$query" fRunStart from RunInfo "
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+      info=( `sendquery` )
+      if [ "${info[0]}" == "999" ] && [ "${info[1]}" == "999" ]
+      then 
+         lightinfo=( `$factpath/moon "${info[2]} ${info[3]}" 2>/dev/null` )
+      else
+         lightinfo=( `$factpath/moon "${info[2]} ${info[3]}" --ra=${info[0]} --dec=${info[1]} 2>/dev/null` )
+      fi
+      # return values of the programm
+      # timestamp sunzd moon-visible moondisk moonzd angletomoon angletosun
+      
+      # build query to update runinfo in DB
+      query="UPDATE RunInfo SET fSunZenithDistance="${lightinfo[2]}", fMoonDisk="${lightinfo[4]}
+      query=$query", fMoonZenithDistance="${lightinfo[5]}
+      if [ "${info[0]}" != "999" ] && [ "${info[1]}" != "999" ]
+      then 
+         query=$query", fAngleToMoon="${lightinfo[6]}
+         query=$query", fAngleToSun="${lightinfo[7]}
+      fi
+      # add where condition
+      query=$query" WHERE fNight="$runnumber" AND fRunID="$filenum
+
+      #echo $query
+      # send query to DB
+      sendquery >/dev/null
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillNumEvts.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillNumEvts.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillNumEvts.sh	(revision 18288)
@@ -0,0 +1,90 @@
+#!/bin/bash
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting "$0
+
+logfile=$runlogpath"/FillNumEvts-"$datetime".log"
+date >> $logfile
+
+# this script doesn't need variable $doupdate
+# filling is done by macro. therefore update is always done
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights if hour between 7 and 19h, else only current night
+   getdates 6 7 19
+fi
+
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   runnumber=`echo $date | sed -e 's/\///g'`
+   
+   # process only data which are available
+   if [ $runnumber -lt $firstnight ]
+   then
+      continue
+   fi
+   
+   # fill run-wise processing
+   if [ -d $anapath/ganymed_run/$date ]
+   then 
+      echo "run numevts.C for night "$runnumber" (run-wise processing) for table "$resulttable1 >> $logfile 2>&1
+      printprocesslog "INFO run numevts.C for night "$runnumber" (run-wise processing) for table "$resulttable1
+      printprocesslog "DEBUG root -q -b -l fact/processing/numevents.C+\($runnumber\,"\"$anapath"\"\,"\"$resulttable1"\"\,kFALSE\,kFALSE\)"
+      check1=`root -q -b -l fact/processing/numevents.C+\($runnumber\,"\"$anapath"\"\,"\"$resulttable1"\"\,kFALSE\,kFALSE\) | tee $logfile | intgrep`
+      
+      case $check1 in
+         1)   printprocesslog "INFO filling numevts.C was successful for night "$runnumber" and table "$resulttable1" (check1=$check1)"
+              ;;
+         0)   printprocesslog "WARN connection to DB failed in numevts.C (check1=$check1)"
+              ;;
+         *)   printprocesslog "ERROR numevts.C failed for night "$runnumber" and table "$resulttable1" (check1=$check1)"
+              ;;
+      esac
+   fi
+   
+   query="SELECT fSourceKEY FROM RunInfo WHERE fNight="$runnumber" AND fSourceKey > 0 AND fRunTypeKEY=1 GROUP BY fSourceKey "
+   sources=( `sendquery` )
+   # fill night-wise processing
+   for source in ${sources[@]}
+   do
+      if [ -d $anapath/ganymed_night/$source/$date ]
+      then 
+         echo "run numevts.C for night "$runnumber" and source "$source" (night-wise processing) " >> $logfile 2>&1
+         printprocesslog "INFO run numevents.C for night "$runnumber" and source "$source" (night-wise processing) "
+         printprocesslog "DEBUG root -q -b -l fact/processing/numevents.C+\($runnumber\,"\"$anapath"\"\,"\"$resulttable2"\"\,kFALSE\,kTRUE\,$source\)"
+
+         check1=`root -q -b -l fact/processing/numevents.C+\($runnumber\,"\"$anapath"\"\,"\"$resulttable2"\"\,kFALSE\,kTRUE\,$source\) | tee $logfile | intgrep`
+         case $check1 in
+            1)   printprocesslog "INFO filling numevts.C was successful for night "$runnumber" and table "$resulttable2" (check1=$check1)"
+                 ;;
+            0)   printprocesslog "WARN connection to DB failed in numevts.C (check1=$check1)"
+                 ;;
+            *)   printprocesslog "ERROR numevts.C failed for night "$runnumber" and table "$resulttable2" (check1=$check1)"
+                 ;;
+         esac
+      fi
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/FillRatescans.sh
===================================================================
--- branches/trigger_burst_research/Processing/FillRatescans.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/FillRatescans.sh	(revision 18288)
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting "$0
+
+## setup to use ftools
+#source $HEADAS/headas-init.sh
+
+logfile=$runlogpath"/FillRatescan-"$datetime".log"
+date >> $logfile
+
+# this script doesn't need variable $doupdate
+# filling is done by macro and update is always done
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'\|'^[0-9][0-9][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights
+   getdates 6
+fi
+
+
+printprocesslog "INFO processing the following night(s): "${dates[@]}
+echo  `date`": processing the following night(s): "${dates[@]} >> $logfile 2>&1
+
+#echo ${dates[@]}
+
+cd $mars
+
+# do filling of aux data 
+for date in ${dates[@]}
+do 
+   auxdir=$auxdata/$date
+   runnumber=`echo $date | sed -e 's/\///g'`
+   
+   # check if aux files are available from that night
+   if ! [ -d $auxdir ]
+   then
+      printprocesslog "INFO no data available in "$auxdir
+      continue
+   else
+      printprocesslog "INFO processing files in "$auxdir
+   fi
+   
+   ratescanfile=$auxdir/$runnumber.RATE_SCAN_DATA.fits
+   printprocesslog "INFO processing "$ratescanfile
+   echo "INFO processing "$ratescanfile >> $logfile 2>&1
+   if ! [ -e $ratescanfile ]
+   then 
+      rawdir=$rawdata/$date
+      # check if raw files are available from that night
+      if ! [ -d $rawdir ]
+      then
+         printprocesslog "INFO "$ratescanfile" not found."
+      else
+         printprocesslog "WARN "$ratescanfile" not found."
+      fi
+      continue
+#   else
+#      ratescannumerrors=`fverify $ratescanfile 2>/dev/null | grep -o '[0-9][ ]error(s)'  | grep -E -o '[0-9]'`
+#      if [ $ratescannumerrors -gt 0 ]
+#      then 
+#         printprocesslog "WARN for $ratescanfile fverify returned "$ratescannumerrors" error(s)."
+#      fi
+   fi
+
+   printprocesslog "INFO run fillratescan.C for night "$runnumber
+   echo "run fillratescan.C for night "$runnumber >> $logfile 2>&1
+   check1=`root -q -b -l fact/processing/fillratescan.C\("\"$ratescanfile"\"\,kFALSE\) | tee $logfile | intgrep`
+   case $check1 in
+      1)   printprocesslog "INFO fillratescan.C was successful for night "$runnumber" (check1=$check1)."
+           ;;
+      0)   printprocesslog "WARN connection to DB failed (check1=$check1)."
+           ;;
+      *)   printprocesslog "ERROR fillratescan.C failed for night "$runnumber" (check1=$check1)."
+           ;;
+   esac
+
+
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/JobManager.sh
===================================================================
--- branches/trigger_burst_research/Processing/JobManager.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/JobManager.sh	(revision 18288)
@@ -0,0 +1,322 @@
+#!/bin/bash
+#
+# This a script, which launches other scripts (all scripts, that are run 
+# on primary basis)
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+set -C
+shopt -s expand_aliases
+
+# function to continue in loop and go to next script
+function sleepawhile()
+{
+   usedsleeptime=$sleeptime
+   case $1 in 
+      "error") if ! [ "$errorsleeptime" = "" ]
+               then 
+                  if [ $errorsleeptime -lt $sleeptimelimit ]
+                  then 
+                     errorsleeptime=`echo " $errorsleeptime + $errorsleeptimedefault " | bc`
+                  fi
+                  usedsleeptime=$errorsleeptime
+               fi
+               ;;
+         "ok") errorsleeptime=$errorsleeptimedefault
+               ;;
+   esac
+   echo `date +%F\ %T`" sleeping "$usedsleeptime" seconds... (status: "$1")" >> $jmscriptlog 2>&1
+   echo "" >> $jmscriptlog 2>&1
+   sleep $usedsleeptime
+   continue
+}
+
+echo "" >> $jmscriptlog 2>&1
+echo "" >> $jmscriptlog 2>&1
+echo -n `date +%F\ %T`" starting jobmanager for setup "$AUTOMATIONSETUP >> $jmscriptlog 2>&1
+
+user=`whoami`
+
+# choose commands according to queueing system (defined in setup)
+case $queuesys in
+      sge)  echo " on queuing system 'sun grid engine'" >> $jmscriptlog 2>&1
+            # (-hard) -l hostname=compute-*
+            #   for qstat this returns the jobs running on that node + all jobs in the queue
+            alias 'queuesubmit'='$sgepath/qsub -b y -q `echo ${queues[$i]}` -t 1-`echo $tosubmit` -v AUTOMATIONSETUP=$AUTOMATIONSETUP -e `echo $runlogpath`/error-`echo $date`.log -o `echo $runlogpath`/log-`echo $date`.log `echo $noderequirementsub` -N `echo $step` `echo $scriptspath`/`echo ${scripts[$i]}` '
+            #alias 'queuesubmit'='$sgepath/qsub -b y -q fact_long -t 1-`echo $tosubmit` -v AUTOMATIONSETUP=$AUTOMATIONSETUP -e `echo $runlogpath`/error-`echo $date`.log -o `echo $runlogpath`/log-`echo $date`.log `echo $noderequirementsub` -N `echo $step` `echo $scriptspath`/`echo ${scripts[$i]}` '
+#            alias 'queuesubmit'='$sgepath/qsub -b y -v AUTOMATIONSETUP=$AUTOMATIONSETUP -e `echo $runlogpath`/error-`echo $date`.log -o `echo $runlogpath`/log-`echo $date`.log `echo $scriptspath`/`echo ${scripts[$i]}` '
+#            alias 'queuesubmit'='$sgepath/qsub -sc runlogpath=`echo $runlogpath` -sc date=`echo $date` -sc scriptspath=`echo $scriptspath` -sc script=`echo ${scripts[$i]}` `echo $scriptspath`/job.sge '
+            # FIXME: get complete scriptname (including command line option), needed for runstereo
+            alias 'checkqueue'="$sgepath/qstat \`echo \$noderequirementstat\`  | awk ' { print \"Owner\"\$4\" \" \$3\"Jobstatus\"\$5 } '"
+            #break
+            ;;
+#      pbs)  echo " on queuing system 'pbs'" >> $jmscriptlog 2>&1
+#            alias 'queuesubmit'='$pbspath/qsub -t 1-`echo $tosubmit` -l walltime=`echo $walltime` -l pmem=`echo $pmem` -v AUTOMATIONSETUP=$AUTOMATIONSETUP,SOURCEFILEPATH=$SOURCEFILEPATH,SCRIPTNAME=`echo ${scripts[$i]}` -e `echo $runlogpath`/error-`echo $date`.log -o `echo $runlogpath`/log-`echo $date`.log `echo $noderequirementsub` `echo $scriptspath`/`echo ${scripts[$i]}` '
+#            # check queue (restricted to current user only)
+#            alias 'checkqueue'="$pbspath/qstat -a -u $user | awk ' { print \"Owner\"\$2\" \" \$4\"Jobstatus\"\$10 } '"
+#            #break
+#            ;;
+#   condor)  echo " on queuing system 'condor'" >> $jmscriptlog 2>&1
+#            alias 'queuesubmit'='$condorpath/condor_submit -a path=`echo $scriptspath` -a prog=`echo ${scripts[$i]}` -a date=`echo $date` -a dir=`echo $runlogpath` -a num=`echo $tosubmit` -a automationsetup=$AUTOMATIONSETUP `echo $scriptspath`/run.condor'
+#            alias 'checkqueue'='$condorpath/condor_q -global -format "Owner%s " Owner -format "%s" CMD -format "Jobstatus%s\n" Jobstatus '
+#            #break 
+#            ;;
+        *)  echo "" >> $jmscriptlog 2>&1
+            finish >> $jmscriptlog 2>&1
+            ;;
+esac
+
+echo "" >> $jmscriptlog 2>&1
+
+# for processing with local storage on different nodes
+currentnode=$minnode
+numevaluated=0
+
+# endless loop
+notcount=0
+errorsleeptime=$errorsleeptimedefault
+while (( $notcount < 100 ))
+do
+   # get and set some information for the processing
+   source `dirname $0`/../Sourcefile.sh
+   # reset some values
+   tosubmit=0
+   idleratio=0
+   addtoscript=
+   
+   # get processes in queue
+   q=(`checkqueue 2>&1 `)
+   if echo $q | egrep \(Error\|failed\)
+   then 
+      echo `date +%F\ %T`" WARN checking query ($queuesys) failed" >> $jmscriptlog 2>&1
+      printprocesslog "WARN checking query ($queuesys) failed"
+      echo `date +%F\ %T`" WARN checking query ($queuesys) failed" >> $jmerrorlog
+      sleepawhile "error"
+   fi
+
+   # general check whether one should submit something depending on chosen algorithm
+   # algorithm 1: 
+   #   submit new jobs in case there are less than $limitidle idle jobs in the queue
+   # algorithm 2: 
+   #   submit new jobs in case the total number of jobs in the queue has fallen below $totalpno
+   case $algorithm in
+      1) # algorithm 1
+         # get number of idle jobs in the queue
+         q5=( `echo ${q[@]} | egrep -o \(Jobstatus1\|Jobstatusq\|JobstatusQ\)` )
+         idle=${#q5[@]}
+         if [ $idle -gt $limitidle ]
+         then 
+            echo `date +%F\ %T`" more than "$limitidle" jobs waiting ("$idle")" >> $jmscriptlog 2>&1
+            sleepawhile "ok"
+         fi
+         ;;
+      2) # algorithm 2
+         # get processes of user in queue
+         q1=( `echo ${q[@]} | egrep -o "Owner$user"`)
+         queued=${#q1[@]}
+         hour=`date +%k`
+         # choose array of total number of jobs to be done 
+         #   according to the day of the week
+         dayofweek=`date +%u`
+         case $dayofweek in
+            6 | 7)  totalpno=${pnototalwe[$hour]} ;;
+                *)  totalpno=${pnototal[$hour]} ;;
+         esac
+         # get total number of jobs to be submitted
+         if [ $queued -gt $totalpno ]
+         then 
+            echo `date +%F\ %T`" more than "$totalpno" jobs waiting ("$queued")" >> $jmscriptlog 2>&1
+            sleepawhile "ok"
+         else
+            tosubmittotal=`echo "$totalpno - $queued" | bc -l`
+         fi
+         ;;
+      *) echo "Please give an algorithm to calculate the number of allowed jobs."
+         exit
+         ;;
+   esac
+   echo `date +%F\ %T`" Total number of jobs to be submitted: "$tosubmittotal >> $jmscriptlog 2>&1
+
+   # first loop to determine 
+   # a) how many jobs of this script have to be done
+   # b) how many jobs of this script are running or queued
+   todo=()
+   tododb=()
+   for (( i=0 ; i < ${#scripts[@]} ; i++ ))
+   do 
+      # set the step to be evaluated
+      step=${scriptscolname[$i]}
+      getstepinfo
+
+      # check if the script is restricted to one node 
+      #   (i.e. where output of previous step(s) is stored)
+      #   this information is taken from the steps.rc file
+      #   currently this is implemented for sge only
+      # then get number of jobs to be done 
+      if [ "$noderestricted" = "yes" ]
+      then
+         # get number of next node 
+         if [ $numevaluated -ge $numrestrictedscripts ]
+         then 
+            currentnode=`echo " $currentnode + 1 " | bc -l`
+            numevaluated=1
+         else
+            numevaluated=`echo " $numevaluated + 1 " | bc -l`
+         fi
+         if [ $currentnode -gt $maxnode ]
+         then 
+            currentnode=$minnode
+         fi
+         # check if node is excluded
+         for excludednode in ${excludednodes[@]}
+         do
+            if [ $currentnode -eq $excludednode ]
+            then
+               echo `date +%F\ %T`" Node compute-0-$currentnode is currently excluded." >> $jmscriptlog 2>&1
+               continue 2
+            fi
+         done
+         # define requirement for submission 
+         # FIXME: currently only for sge at isdc
+         echo `date +%F\ %T`" Checking for node $currentnode. " >> $jmscriptlog 2>&1
+         noderequirementsub=" -hard -l hostname=compute-0-${currentnode}"
+         noderequirementstat=" -l hostname=compute-0-${currentnode}"
+         # get number of jobs to be done from the DB
+         getstatus $currentnode >> $jmscriptlog 2>&1
+      else
+         noderequirementsub=""
+         noderequirementstat=""
+         # get number of jobs to be done from the DB
+         getstatus >> $jmscriptlog 2>&1
+      fi
+      # store the number of processes to be done for this script
+      todo[$i]=$numproc
+      tododb[$i]=$numproc
+      
+      # FIXME: sge cuts scriptname to 8 digits in qstat
+      # number of idle jobs, i.e. jobs waiting in the queue to run
+      #   condor: 1
+      #   sge: q
+      #   pbs: Q
+      #q4=( `echo ${q[@]} | egrep -o \("${scripts[$i]}"Jobstatus1\|"${scripts[$i]}"Jobstatusq\|"${scripts[$i]}"JobstatusQ\)` )
+      q4=( `echo ${q[@]} | egrep -o \("$step"Jobstatus1\|"$step"Jobstatusq\|"$step"JobstatusQ\)` )
+      idlescript[$i]=${#q4[@]}
+
+      #q2=( `echo ${q[@]} | egrep -o "${scripts[$i]}"`)
+      q2=( `echo ${q[@]} | egrep -o "$step"`)
+      queuedscript[$i]=${#q2[@]}
+
+      stillfree[$i]=`echo "${maxjobs[$i]} - ${queuedscript[$i]} " | bc -l`
+
+      if [ $numproc -eq 0 ] || [ ${todo[$i]} -le ${idlescript[$i]} ] || [ ${maxjobs[$i]} -le ${queuedscript[$i]} ]
+      then 
+         # store the fraction of cpus to add it to another process
+         idleratio=`echo " ${ratio[$i]} + $idleratio " | bc -l`
+         ratio[$i]=0
+         todo[$i]=0
+         idlenum=$i
+         continue
+      fi
+   done
+   echo `date +%F\ %T`" Evaluated scripts: "${scripts[@]} >> $jmscriptlog 2>&1
+   echo `date +%F\ %T`" Running scripts: "${queuedscript[@]}" (max: "${maxjobs[@]}")" >> $jmscriptlog 2>&1
+   echo `date +%F\ %T`" Number of jobs to be done (from DB): "${tododb[@]} >> $jmscriptlog 2>&1
+   echo `date +%F\ %T`" Number of jobs to be done (updated): "${todo[@]} >> $jmscriptlog 2>&1
+   echo `date +%F\ %T`" Ratio: "${ratio[@]}" (idle: "$idleratio")" >> $jmscriptlog 2>&1
+   
+   # loop to update the ratio taking into account the ratio of
+   #   a) steps where nothing has to done
+   #   b) steps where already enough jobs are in the queue
+   # sum up this idle ratio
+   # determine for which step still most jobs have to be done
+   if ! [ "$idleratio" = "0" ]
+   then 
+      addtoscript=
+      for (( i=0 ; i < ${#scripts[@]} ; i++ ))
+      do 
+         if [ ${todo[$i]} -gt ${todo[$idlenum]} ] && [ ${todo[$i]} -gt 0 ] 
+         then
+            if ! [ "$addtoscript" = "" ] 
+            then 
+               if [ ${todo[$i]} -lt ${todo[$addtoscript]} ]
+               then
+                  continue
+               fi
+            fi
+            addtoscript=$i
+         fi
+      done
+      
+      # continue in case nothing has to be done for all steps
+      # else: update the ratio for the step where most jobs have to be done
+      #   by adding the idle ratio
+      if [ "$addtoscript" = "" ] 
+      then
+         echo `date +%F\ %T`" No jobs to be done for any step." >> $jmscriptlog 2>&1
+         sleepawhile "ok"
+      else
+         ratio[$addtoscript]=`echo " ${ratio[$addtoscript]} + $idleratio " | bc -l`
+      fi
+   fi
+   echo `date +%F\ %T`" Updated ratio: "${ratio[@]} >> $jmscriptlog 2>&1
+   
+   
+   # loop to submit jobs to queueing system
+   for (( i=0 ; i < ${#scripts[@]} ; i++ ))
+   do 
+      # calculate number of jobs to be submitted
+      tosubmit=`echo "scale=0; $tosubmittotal * ${ratio[$i]} / 1 " | bc -l`
+      if [ ${todo[$i]} -lt $tosubmit ]
+      then
+         echo `date +%F\ %T`" Updating tosubmit for "${scripts[$i]}" from "$tosubmit" to "${todo[$i]} >> $jmscriptlog 2>&1
+         tosubmit=${todo[$i]}
+      fi
+      if [ $tosubmit -eq 0 ]
+      then
+         echo `date +%F\ %T`" No jobs to be submitted for script '"${scripts[$i]}"'" >> $jmscriptlog 2>&1
+         continue
+      fi
+      if [ $tosubmit -gt ${stillfree[$i]} ]
+      then
+         echo `date +%F\ %T`" Updating tosubmit for "${scripts[$i]}" from "$tosubmit" to "${stillfree[$i]} >> $jmscriptlog 2>&1
+         tosubmit=${stillfree[$i]}
+      fi
+      
+      # set the step to be evaluated
+      step=${scriptscolname[$i]}
+      # check if walltime has to be set
+      if [ "$setwalltime" = "yes" ]
+      then
+         walltime=${walltimes[$i]}
+      fi
+      # check if memory has to be set
+      if [ "$setpmem" = "yes" ]
+      then
+         pmem=${pmems[$i]}
+      fi
+
+      # set $tosubmit to 1 if something is 'qw', because in this case 
+      #   only one line for several jobs might be shown
+      if [ ${idlescript[$i]} -gt 0 ] # && [ "$step" == "Callisto" ]
+      then 
+         tosubmit=1
+      fi
+      echo "tosubmit: "$tosubmit
+      # submit $tosubmit scripts to queuing system
+      #tosubmit=1 #workaround for test on fact cluster
+      echo `date +%F\ %T`" Submitting "$tosubmit" jobs for script '"${scripts[$i]}"' to "$queuesys >> $jmscriptlog 2>&1
+      date=`date +%Y-%m-%d`
+      if ! queuesubmit 2>> $jmerrorlog
+      then 
+         echo `date`" WARN submitting job ($queuesys) failed" >> $jmerrorlog
+         echo `date +%F\ %T`" WARN $queuesys is not working -> sleeping $errorsleeptime [\$errorsleeptime]" >> $jmscriptlog 2>&1
+         printprocesslog "WARN submitting ${scripts[$i]} ($queuesys) failed"
+         sleepawhile "error"
+      fi
+      echo ""
+   done
+   sleepawhile "ok"
+done
+
Index: branches/trigger_burst_research/Processing/RunCallisto.sh
===================================================================
--- branches/trigger_burst_research/Processing/RunCallisto.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/RunCallisto.sh	(revision 18288)
@@ -0,0 +1,67 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=RunCallisto
+step=Callisto
+
+set -C
+
+for (( i=0; i<100; i++ ))
+do 
+   # get todo list
+   gettodo "1"
+
+   # get all needed variables
+   night=${primaries[0]}
+   seqid=${primaries[1]}
+   nightpath=`echo $night | cut -c 1-4`"/"`echo $night | cut -c 5-6`"/"`echo $night | cut -c 7-8`
+   seqnum=$night"_"`printf %03d $seqid`
+
+   # check if script is already running
+   lockfile=$lockpath"/lock-"$program"-"$seqnum".txt"
+   checklock continue
+   printprocesslog "INFO got todo with try #"$i
+   break
+done
+
+# get all needed paths and files
+delays="resources/delays-20150217.txt"
+drstimefiles=`ls $drstimepath | sort`
+for drstimefile in $drstimefiles
+do 
+   num=`echo $drstimefile | cut -c 1-8`
+   if [ $num -gt $night ]
+   then
+      break
+   fi
+   drstime=$drstimepath/$drstimefile
+done
+   
+seqfile=$seqpath"/"$nightpath"/"$seqnum".seq"
+outpath=$datapath"/callisto/"$nightpath
+makedir $outpath
+logfile=$outpath"/"$seqnum"-calibration.log"
+
+cd $mars
+
+# run callisto 
+printprocesslog "INFO starting callisto.C for sequence "$seqnum
+setstatus "start" 
+
+printprocesslog "DEBUG root -q -b fact/analysis/callisto.C\("\"$seqfile\""\,"\"$outpath\""\,\""$drstime"\"\,\""$delays"\"\) | tee $logfile "
+check1=`root -q -b fact/analysis/callisto.C\("\"$seqfile\""\,"\"$outpath\""\,\""$drstime"\"\,\""$delays"\"\) | tee $logfile | intgrep`
+
+case $check1 in
+   0)   printprocesslog "INFO callisto was successful for sequence "$seqnum" (check1=$check1)"
+        ;;
+   *)   printprocesslog "ERROR callisto.C failed for sequence "$seqnum" (check1=$check1)"
+        check=$check1
+        ;;
+esac
+
+setstatus "stop" 
+
+finish 
+
Index: branches/trigger_burst_research/Processing/RunCeres.sh
===================================================================
--- branches/trigger_burst_research/Processing/RunCeres.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/RunCeres.sh	(revision 18288)
@@ -0,0 +1,101 @@
+#!/bin/bash
+#
+#
+# This script is launching ceres for corsika runs. 
+# 
+# The ceres.rc and other setup files are stored in the setup directory.
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=ceres
+step=Ceres
+
+set -C
+
+# get run # 
+numchanged=0
+while (( 0 < 21 ))
+do 
+   if ! [ $numchanged -eq 1 ]
+   then 
+      if ! [ "$run" = "" ] && ! [ "$cereskey" = "" ]
+      then
+         printprocesslog "INFO $program for run $run cereskey $cereskey is already running => request new number "
+      fi
+      gettodo "1" 
+      run=${primaries[0]}
+      cereskey=${primaries[1]}
+      if [ "$run" = "" ] || [ "$cereskey" = "" ]
+      then
+         printprocesslog "INFO nothing to do for $program "
+      fi
+   else
+      printprocesslog "INFO starting $program for run $run cereskey $cereskey"
+      break
+   fi
+   setstatus "start"
+done
+
+cd $mars
+
+# run ceres for run
+
+setupfile=$setuppath/$program/`printf %03d $cereskey`/ceres.rc
+
+query="SELECT fRunTypeKEY FROM CeresInfo WHERE fRunNumber="$run" AND fCeresSetupKEY="$cereskey" GROUP BY fRunNumber"
+runkey=`sendquery`
+
+case $runkey in 
+   2) printprocesslog "INFO run $run is a data run (key="$runkey")"
+      printprocesslog "INFO getting file numbers for run "$run
+      query="SELECT CONCAT('"$mcpath"/corsika/', LEFT(LPAD(CeresInfo.fRunNumber, 8, '0'), 4),"
+      query=$query" '/', RIGHT(LPAD(CeresInfo.fRunNumber, 8, '0'), 4), '/cer000', "
+      query=$query" RIGHT(LPAD(CeresInfo.fFileNumber, 6, '0'), 3)) from CeresInfo "
+      query=$query" WHERE fRunNumber="$run" AND fCeresSetupKEY="$cereskey
+      inputfiles=`sendquery`
+      outpath=$mcpath/ceres/`printf %03d $cereskey`/`printf %08d $run | cut -c 1-4`/`printf %08d $run | cut -c 5-8`
+      makedir $outpath
+      log=$outpath/ceres`printf %08d $run `
+      command="./ceres -b -q -f --config=$setupfile --out=$outpath --log=$log.log --html=$log.html --run-number=$run $inputfiles"
+      printprocesslog "INFO executing "$command
+      $command
+      check1=$?
+      ;;
+   3) printprocesslog "INFO run $run is a pedestal run (key="$runkey")"
+      drun=`echo "$run + 2 " | bc -l`
+      outpath=$mcpath/ceres/`printf %03d $cereskey`/`printf %08d $drun | cut -c 1-4`/`printf %08d $drun | cut -c 5-8`
+      makedir $outpath
+      log=$outpath/ceres`printf %08d $run `
+      command="./ceres -b -q -f --config=$setupfile --out=$outpath --log=$log.log --html=$log.html --run-number=$run pedestal"
+      printprocesslog "INFO executing "$command
+      $command
+      check1=$?
+      ;;
+   4) printprocesslog "INFO run $run is a calibration run (key="$runkey")"
+      drun=`echo "$run + 1 " | bc -l`
+      outpath=$mcpath/ceres/`printf %03d $cereskey`/`printf %08d $drun | cut -c 1-4`/`printf %08d $drun | cut -c 5-8`
+      makedir $outpath
+      log=$outpath/ceres`printf %08d $run `
+      command="./ceres -b -q -f --config=$setupfile --out=$outpath --log=$log.log --html=$log.html --run-number=$run calibration"
+      printprocesslog "INFO executing "$command
+      $command
+      check1=$?
+      ;;
+   *) printprocesslog "WARN not valid fRunTypeKEY ("$runkey") was queried from the DB."
+      check="no"
+      ;;
+esac
+
+case $check1 in
+   0)   printprocesslog "INFO $program finished successfully for run $run, cereskey $cereskey, runtype $runkey (check1=$check1)"
+        ;;
+   *)   printprocesslog "ERROR $program failed for run $run, cereskey $cereskey, runtype $runkey (check1=$check1)"
+        check=$check1
+        ;;
+esac
+
+setstatus "stop"
+
+finish
+
Index: branches/trigger_burst_research/Processing/RunMCCallisto.sh
===================================================================
--- branches/trigger_burst_research/Processing/RunMCCallisto.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/RunMCCallisto.sh	(revision 18288)
@@ -0,0 +1,65 @@
+#!/bin/bash
+#
+# This script is launching the calibration of mc sequences. 
+# 
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=callisto
+step=Callisto
+
+set -C
+
+# get sequence # 
+numchanged=0
+while (( 0 < 21 ))
+do 
+   if ! [ $numchanged -eq 1 ]
+   then 
+      if ! [ "$sequence" = "" ] && ! [ "$cereskey" = "" ]
+      then
+         printprocesslog "INFO $program for sequence $sequence cereskey $cereskey is already running => request new number "
+      fi
+      gettodo "1" 
+      sequence=${primaries[0]}
+      cereskey=${primaries[1]}
+      if [ "$sequence" = "" ] || [ "$cereskey" = "" ]
+      then
+         printprocesslog "INFO nothing to do for $program "
+      fi
+   else
+      printprocesslog "INFO starting $program for sequence $sequence cereskey $cereskey"
+      break
+   fi
+   setstatus "start"
+done
+
+cd $mars
+
+# run calibration for sequence
+
+# define files and paths
+sequfile="$mcsequpath/`printf %08d $sequence | cut -c 1-4`/sequence`printf %08d $sequence`.txt"
+outpath=$mcpath/$program/`printf %03d $cereskey`/`printf %08d $sequence | cut -c 1-4`/`printf %08d $sequence | cut -c 5-8`
+makedir $outpath
+log=$outpath/$program`printf %08d $sequence`
+callistorc=$setuppath/$program/`printf %03d $cereskey`/callisto.rc
+inpath=$mcpath/ceres/`printf %03d $cereskey`/`printf %08d $sequence | cut -c 1-4`/`printf %08d $sequence | cut -c 5-8`
+
+command="./callisto -b -q -v4 -f --out=$outpath --ind=$inpath --log=$log.log --html=$log.html --config=$callistorc $sequfile"
+printprocesslog "INFO executing "$command
+$command
+check1=$?
+
+case $check1 in
+   0)  printprocesslog "INFO $program finished successfully for sequence $sequence cereskey $cereskey (return code $check1)"
+       ;;
+   *)  printprocesslog "ERROR $program failed for sequence $sequence cereskey $cereskey (return code $check1)"
+       check=$check1
+       ;;
+esac
+
+setstatus "stop"
+
+finish
+
Index: branches/trigger_burst_research/Processing/RunMCStar.sh
===================================================================
--- branches/trigger_burst_research/Processing/RunMCStar.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/RunMCStar.sh	(revision 18288)
@@ -0,0 +1,64 @@
+#!/bin/bash
+#
+# This script is launching star for mc sequences. 
+# 
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=star
+step=Star
+
+set -C
+
+# get sequence # 
+numchanged=0
+while (( 0 < 21 ))
+do 
+   if ! [ $numchanged -eq 1 ]
+   then 
+      if ! [ "$sequence" = "" ] && ! [ "$cereskey" = "" ]
+      then
+         printprocesslog "INFO $program for sequence $sequence cereskey $cereskey is already running => request new number "
+      fi
+      gettodo "1" 
+      sequence=${primaries[0]}
+      cereskey=${primaries[1]}
+      if [ "$sequence" = "" ] || [ "$cereskey" = "" ]
+      then
+         printprocesslog "INFO nothing to do for $program "
+      fi
+   else
+      printprocesslog "INFO starting $program for sequence $sequence cereskey $cereskey"
+      break
+   fi
+   setstatus "start"
+done
+
+cd $mars
+
+# run calibration for sequence
+
+# define files and paths
+sequfile="$mcsequpath/`printf %08d $sequence | cut -c 1-4`/sequence`printf %08d $sequence`.txt"
+outpath=$mcpath/$program/`printf %03d $cereskey`/`printf %08d $sequence | cut -c 1-4`/`printf %08d $sequence | cut -c 5-8`
+makedir $outpath
+log=$outpath/$program`printf %08d $sequence`
+inpath=$mcpath/callisto/`printf %03d $cereskey`/`printf %08d $sequence | cut -c 1-4`/`printf %08d $sequence | cut -c 5-8`
+
+command="./star -b -q -v4 -f --ind=$inpath --out=$outpath --log=$log.log --html=$log.html $sequfile"
+printprocesslog "INFO executing "$command
+$command
+check1=$?
+
+case $check1 in
+   0)  printprocesslog "INFO $program finished successfully for sequence $sequence cereskey $cereskey (return code $check1)"
+       ;;
+   *)  printprocesslog "ERROR $program failed for sequence $sequence cereskey $cereskey (return code $check1)"
+       check=$check1
+       ;;
+esac
+
+setstatus "stop"
+
+finish
+
Index: branches/trigger_burst_research/Processing/RunMoreNights.sh
===================================================================
--- branches/trigger_burst_research/Processing/RunMoreNights.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/RunMoreNights.sh	(revision 18288)
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+doupdate="yes"
+#doupdate="no"
+
+# get all nights
+getdates "all"
+# get one single night
+#getdates "2013/08/13"
+# get several nights
+#getdates 15
+
+usesge="yes"
+#usesge="no"
+
+echo ${dates[@]}
+
+scripts=( \
+#        ~/DataCheck/Processing/FillAuxCurrents.sh \
+#        ~/DataCheck/Processing/FillAuxTemp.sh \
+#        ~/DataCheck/Processing/FillDrsTemp.sh \
+#        ~/DataCheck/Processing/FillAuxData.sh \
+#	~/DataCheck/QuickLook/Step2a.sh \
+#        ~/DataCheck/Processing/FillEffectiveOn.sh \
+        ~/DataCheck/Processing/FillNumEvts.sh \
+#        ~/DataCheck/Processing/FillAuxThresholds.sh \
+#        ~/DataCheck/Processing/FillMoonInfo.sh \
+        )
+
+# start scripts for that night
+for date in ${dates[@]}
+do 
+   echo $date
+   for script in ${scripts[@]}
+   do
+#      echo $script" for "$date
+      name=`basename $script | sed -e 's/Fill//' -e 's/Aux//' | cut -c 1-3``echo $date | sed -e 's/\///g'`
+      date2=`echo $date | sed -e 's/\//-/g' | cut -c 3-8`
+      #echo $name 
+      if [ "$usesge" = "yes" ]
+      then 
+         echo "INFO submitting "$script" to the cluster for the night "$date
+         printprocesslog "INFO submitting "$script" to the cluster for the night "$date
+         $sgepath/qsub -b y -q fact_medium -v doupdate=$doupdate -v certaindate=$date -v AUTOMATIONSETUP=$AUTOMATIONSETUP -e $runlogpath"/error-"$date2".log" -o $runlogpath"/log-"$date2".log" -N $name $script $date
+      else
+         echo "INFO starting "$script" for date "$date" on "$HOST" ... "
+         printprocesslog "INFO starting "$script" for date "$date" on "$HOST" ... "
+         export certaindate="$date"
+         export doupdate="$doupdate"
+         $script #&
+      fi
+   done
+done
+
+finish
+
+
Index: branches/trigger_burst_research/Processing/RunStar.sh
===================================================================
--- branches/trigger_burst_research/Processing/RunStar.sh	(revision 18288)
+++ branches/trigger_burst_research/Processing/RunStar.sh	(revision 18288)
@@ -0,0 +1,75 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=RunStar
+step=Star
+
+set -C
+
+for (( i=0; i<100; i++ ))
+do 
+   # get todo list
+   gettodo "1"
+
+   # get all needed variables
+   night=${primaries[0]}
+   seqid=${primaries[1]}
+   nightpath=`echo $night | cut -c 1-4`"/"`echo $night | cut -c 5-6`"/"`echo $night | cut -c 7-8`
+   seqnum=$night"_"`printf %03d $seqid`
+
+   # check if script is already running
+   lockfile=$lockpath"/lock-"$program"-"$seqnum".txt"
+   checklock continue
+   printprocesslog "INFO got todo with try #"$i
+   break
+done
+
+# get all needed paths and files
+seqfile=$seqpath"/"$nightpath"/"$seqnum".seq"
+inpath=$datapath"/callisto/"$nightpath
+outpath=$datapath"/star/"$nightpath
+makedir $outpath
+logfile=$outpath"/"$seqnum"-images.log"
+
+cd $mars
+
+# run star 
+printprocesslog "INFO starting star.C for sequence "$seqnum
+setstatus "start" 
+
+#check1=`root -q -b fact/star.C\("\"$seqfile\""\,7.5\,3.9\,"\"$inpath\""\,"\"$outpath\""\) | tee $logfile | intgrep`
+# new cleaning levels starting from version 2012.06.22
+#check1=`root -q -b fact/analysis/star.C\("\"$seqfile\""\,4.0\,2.5\,"\"$inpath\""\,"\"$outpath\""\) | tee $logfile | intgrep`
+# new cleaning levels for the new calibration
+#check1=`root -q -b fact/analysis/star.C\("\"$seqfile\""\,5.2\,3.3\,"\"$inpath\""\,"\"$outpath\""\) | tee $logfile | intgrep`
+# new cleaning
+check1=`root -q -b fact/analysis/star.C\("\"$seqfile\""\,"\"$inpath\""\,"\"$outpath\""\) | tee $logfile | intgrep`
+
+case $check1 in
+   0)   printprocesslog "INFO star was successful for sequence "$seqnum" (check1=$check1)"
+        ;;
+   *)   printprocesslog "ERROR star.C failed for sequence "$seqnum" (check1=$check1)"
+        check=$check1
+        ;;
+esac
+
+# run merpp
+printprocesslog "INFO starting merpp3.C for sequence "$seqnum
+logfile=$outpath"/"$seqnum"-merpp.log"
+
+check1=`root -q -b fact/analysis/merpp.C\("\"$seqfile\""\,"\"$outpath\""\) | tee $logfile | intgrep`
+
+case $check1 in
+   0)   printprocesslog "INFO merpp was successful for sequence "$seqnum" (check1=$check1)"
+        ;;
+   *)   printprocesslog "ERROR merpp3.C failed for sequence "$seqnum" (check1=$check1)"
+        check=10
+        ;;
+esac
+
+setstatus "stop" 
+
+finish 
+
Index: branches/trigger_burst_research/Processing/fill_sqm_data_into_db.py
===================================================================
--- branches/trigger_burst_research/Processing/fill_sqm_data_into_db.py	(revision 18288)
+++ branches/trigger_burst_research/Processing/fill_sqm_data_into_db.py	(revision 18288)
@@ -0,0 +1,171 @@
+#!/usr/bin/env python2 
+# coding: utf-8
+"""
+authors: Max Ahnen, Dominik Neise
+----------------------------------------------------------------------------
+"THE BEER-WARE LICENSE" (Revision 42):
+Max Ahnen and Dominik Neise wrote this file. As long as you retain this notice you
+can do whatever you want with this stuff. If we meet some day, and you think
+this stuff is worth it, you can buy us a beer.
+----------------------------------------------------------------------------
+
+This script calculates the mean of the SQM magnitude for a given run
+(run is given by its start_ and stop_time)
+and the p-value for a linear fit.
+
+We hope that this will be used to separate good data, where 
+the linear fit is perfect (p-value > 1e-3?) and bad fits, 
+where clouds let the magnitude brightness fluctuate stronger,
+so the fit worsens.
+
+Returns: a string
+  "results {mean magnitude:f} {p-value:f}"   
+"""
+
+from astropy.io import fits
+import numpy as np
+import scipy as sp
+import sys
+import ROOT 
+
+import pandas as pd
+from sqlalchemy import create_engine
+
+import glob
+
+from calendar import timegm
+import time
+
+database = {
+    'user': 'factread',
+    'password': 'r3adfac!',
+    'host': '129.194.168.95',
+    'table': 'factdata',
+}
+
+root_database = {
+    'user': 'root',
+    'password': '1440Gapd',
+    'host': 'localhost',
+    'table': 'factdata',
+}
+
+
+db_string = '{user}:{password}@{host}/{table}'
+
+try:
+    factdb = create_engine('mysql+mysqldb://'+ db_string.format(**database))
+    factdb_root = create_engine('mysql+mysqldb://'+ db_string.format(**root_database))
+except ImportError:
+    factdb = create_engine('mysql+pymysql://'+db_string.format(**database))
+    factdb_root = create_engine('mysql+pymysql://'+db_string.format(**root_database))
+
+
+def get_list_of_SQM_files(base_path='/daq/aux'):
+    return sorted(glob.glob(base_path+'/*/*/*/*.SQM_CONTROL_DATA.fits'))
+
+def get_y_m_d(file_path):
+    s = file_path.split('/')[-1].split('.')[0]
+    return s[0:4], s[4:6], s[6:8]
+
+def get_night(file_path):
+    return file_path.split('/')[-1].split('.')[0]
+
+def mag_mean_p_value(fits_file_path, start_time, stop_time):
+
+	d = fits.open(fits_file_path)[1].data
+	d = d[(d['Time'] > start_time) * (d['Time'] < stop_time)]
+        if len(d)==0:
+            return None
+
+	x =  d['Time'].copy() - d['Time'][0]
+        y = d['Mag'].astype(np.float64).copy()
+        start_time, stop_time = x[0], x[-1]
+
+        sigma  = 0.025 / np.sqrt(12.)
+	g = ROOT.TGraphErrors(len(x), x, y, np.zeros(len(x), dtype=np.float64), np.ones(len(x), dtype=np.float64)*sigma)
+	f = ROOT.TF1("linfit", "pol1", start_time, stop_time)
+	g.Fit(f, "E")
+        
+        function = f.Eval
+	y_fit = map(function,  x)
+ 
+        result = {
+            'fSqmMagMean' : d['Mag'].mean(),
+            'fSqmMagLinFitPValue' : f.GetProb(),
+            'fSqmMagLinFitChi2' : f.GetChisquare(),
+            'fSqmMagLinFitNdf' : f.GetNDF(),
+            'fSqmMagLinFitSlope' : f.GetParameters()[1],
+        }
+
+	return result
+
+def timestamp_from_string(stri):
+    return timegm(time.strptime(stri.replace('Z', 'UTC'),'%Y-%m-%d %H:%M:%S%Z'))
+
+def update_dict_in_database(some_dict, db, primary_keys=("fNight", "fRunID")):
+    commands = []
+    commands.append('BEGIN;')
+
+    # "UPDATE RunInfo SET weight = 160, desiredWeight = 145 WHERE id = 1;"
+    update_string = "UPDATE RunInfo SET "
+    first = True
+    for k in some_dict:
+        if k not in primary_keys:
+            if not first:
+                update_string += ', '
+            else:
+                first = False
+
+            update_string += "{0} = {1}".format(k, some_dict[k])
+    update_string += " WHERE "
+    first = True
+    for k in some_dict:
+        if k in primary_keys:
+            if not first:
+                update_string += ' AND '
+            else:
+                first = False
+            update_string += "{0} = {1}".format(k, some_dict[k])
+    update_string += ';'
+
+    commands.append(update_string)
+    commands.append('COMMIT;')
+
+    # print commands
+    for com in commands:
+        db.engine.execute(com)
+
+    
+if __name__ == "__main__":
+    aux_files = get_list_of_SQM_files()
+    for aux_file in aux_files:
+      night = get_night(aux_file)
+
+      query = ("SELECT fRunID, fRunStart, fRunStop from RunInfo"
+               " WHERE fNight={0}").format(night)
+
+      df = pd.read_sql_query(query, factdb)
+
+      for i in range(df.shape[0]):
+          row = df.iloc[i]
+
+          try:
+              run_start = timestamp_from_string(str(row['fRunStart'])+'Z')/(24.*3600.)
+              run_stop = timestamp_from_string(str(row['fRunStop'])+'Z')/(24.*3600.)
+              run_id = row['fRunID']
+          except ValueError as e:
+              print e
+              print row['fRunStart'], row['fRunStop']
+              continue
+
+          result= mag_mean_p_value(aux_file, run_start, run_stop)
+          if result is None:
+              continue
+          result['fNight'] = int(night)
+          result['fRunID'] = run_id
+
+          update_dict_in_database(result, factdb_root)
+
+          print time.asctime(), aux_file, run_start, run_stop, result
+
Index: branches/trigger_burst_research/QuickLook/ReDoStar.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/ReDoStar.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/ReDoStar.sh	(revision 18288)
@@ -0,0 +1,55 @@
+#!/bin/bash
+#
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+#root=/opt/root_svn/bin/thisroot.sh
+#source $root
+
+anapath=/loc_data/analysis
+auxpath=/loc_data/aux
+
+printprocesslog "INFO get list of calibrated files"
+
+date="2014/05/24"
+#calfiles=( `find $anapath -type f -name '*_C.root' 2>/dev/null | grep -v failed_stuff | sort` )
+calfiles=( `find $anapath/callisto/$date -type f -name '*_C.root' 2>/dev/null | sort` )
+if [ ${#calfiles[@]} -eq 0 ]
+then
+   printprocesslog "INFO no calibrated files available "
+   finish
+fi
+#starfiles=( `find $anapath -type f -name '*_I.root' 2>/dev/null | grep -v failed_stuff | sort` )
+starfiles=( `find $anapath/star/$date -type f -name '*_I.root' 2>/dev/null | sort` )
+if [ ${#starfiles[@]} -eq 0 ]
+then
+   printprocesslog "INFO no star files available "
+#   finish
+fi
+
+echo ${#calfiles[@]}" "${#starfiles[@]}
+printprocesslog "INFO #cal-files:"${#calfiles[@]}" #star-files:"${#starfiles[@]}
+
+if [ ${#starfiles[@]} -lt ${#calfiles[@]} ] 
+then
+   for calfile in ${calfiles[@]}
+   do 
+      starfile=`echo $calfile | sed -e 's/callisto/star/' -e 's/_C/_I/'`
+      logfile=`echo $starfile | sed -e 's/_I.root/-images.log/'`
+      if ! ls $starfile >/dev/null 2>&1
+      then
+         echo $starfile" is missing -> reprocess. "
+         outpath=`dirname $starfile`
+         makedir $outpath
+         `dirname $0`/RunStar.sh $logfile $calfile $outpath $starfile &	
+         if ! ls $starfile >/dev/null 2>&1
+         then 
+            echo "  failed: check log "`echo $starfile | sed -e 's/_I.root/-images.log/'`
+            echo "          check calfile "$calfile
+         fi
+         echo ""
+      fi
+   done
+fi
+
+
Index: branches/trigger_burst_research/QuickLook/RunCallisto.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/RunCallisto.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/RunCallisto.sh	(revision 18288)
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+logfile=$2
+
+# make sure that no other RunCallisto.sh is started for this run
+touch $logfile
+
+cd $mars
+
+# check how many callistos are running
+callistocount=`ps aux | grep callisto_ | grep -E -o '20[12][0-9][01][0-9][0-3][0-9]_[0-9][0-9][0-9]' | sort | uniq | wc -l`
+
+while [ $callistocount -ge $numcallistos ]
+do
+   source `dirname $0`/../Sourcefile.sh
+   printprocesslog "INFO wait "$callistowait" as "$callistocount" callistos running ( "$numcallistos" allowed). "
+   sleep $callistowait
+   callistocount=`ps aux | grep callisto_ | grep -E -o '20[12][0-9][01][0-9][0-3][0-9]_[0-9][0-9][0-9]' | sort | uniq | wc -l`
+done
+
+# get all needed paths and files
+delays="resources/delays-20150217.txt"
+drstimefiles=`ls $drstimepath | sort`
+for drstimefile in $drstimefiles
+do 
+   num=`echo $drstimefile | cut -c 1-8`
+   if [ $num -gt $night ]
+   then
+      break
+   fi
+   drstime=$drstimepath/$drstimefile
+done
+   
+case $1 in
+   drun) # process data run
+         printprocesslog "DEBUG fact/analysis/callisto_data.C\("\"$3\""\,"\"$4\""\,"\"$drstime\""\,"\"$delays\""\,"\"$5\""\) | tee $logfile "
+         check1=`root -q -b fact/analysis/callisto_data.C\("\"$3\""\,"\"$4\""\,"\"$drstime\""\,"\"$delays\""\,"\"$5\""\) | tee $logfile | intgrep`
+         ;;
+   prun) # process pedestal run
+         # currently not done (s Step1.sh)
+         echo "processing pedetal runs currently not implemented"
+         # printprocesslog "DEBUG fact/analysis/callisto_pedestal.C\("\"$3\""\,"\"$4\"","\"$5\""\) | tee $logfile "
+         #check1=`root -q -b fact/analysis/callisto_pedestal.C\("\"$3\""\,"\"$4\"","\"$5\""\) | tee $logfile | intgrep`
+         ;;
+   crun) # process light-pulser run
+         # currently not done (s Step1.sh)
+         echo "processing light-pulser runs currently not implemented"
+         #printprocesslog "DEBUG fact/analysis/callisto_lightpulser.C\("\"$3\""\,"\"$4\""\,"\"$5\""\,"\"$5\""\) | tee $logfile "
+         #check1=`root -q -b fact/analysis/callisto_lightpulser.C\("\"$3\""\,"\"$4\""\,"\"$5\""\,"\"$5\""\) | tee $logfile | intgrep`
+         ;;
+   time) # do drs-time-calib
+         printprocesslog "DEBUG fact/analysis/callisto_drstime.C\("\"$3\""\,"\"$4\"","\"$5\""\) | tee $logfile "
+         check1=`root -q -b fact/analysis/callisto_drstime.C\("\"$3\""\,"\"$4\"","\"$5\""\) | tee $logfile | intgrep`
+         ;;
+   *)    # default
+         printprocesslog "WARN wrong parameter given to script RunCallisto.sh :"$1
+         ;;
+esac
+
+case $check1 in
+   0)   printprocesslog "INFO callisto was successful for file "$3" (check1=$check1)"
+        if [ "$1" == "drun" ]
+        then
+           printprocesslog "INFO start RunStar.sh for file "$6
+           starfile=`echo $6 | sed -e 's/callisto/star/g' -e 's/_C/_I/'`
+           starlog=`echo $starfile | sed -e 's/_I.root/-images.log/'`
+           outpath=`dirname $starfile`
+           makedir $outpath
+           `dirname $0`/RunStar.sh $starlog $6 $outpath $starfile
+        fi
+        ;;
+   *)   printprocesslog "ERROR callisto failed for file "$3" (check1=$check1)"
+        # allow for reprocessing in case callisto fails once
+        cplogfile=$logfile.cp
+        if ! ls $cplogfile >/dev/null 2>/dev/null
+        then
+           mv $logfile $cplogfile
+        fi
+        ;;
+esac
+
+finish 
+
Index: branches/trigger_burst_research/QuickLook/RunGanymed.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/RunGanymed.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/RunGanymed.sh	(revision 18288)
@@ -0,0 +1,120 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# get and check variables
+if ! [ "$1" = "" ]
+then
+   starfile=$1
+fi
+
+if [ "$starfile" = "" ]
+then
+   echo "ERROR variable starfile empty."
+   printprocesslog "ERROR variable starfile empty."
+fi
+
+# getting coordinates from raw and drive or DB
+date=`echo $starfile | grep -o -E '20[0-9][0-9]/[0-3][0-9]/[0-3][0-9]'`
+drivefile=$auxdata"/"$date"/"`basename $starfile | cut -d_ -f1`".DRIVE_CONTROL_SOURCE_POSITION.fits"
+rawfile=$rawdata"/"$date"/"`basename $starfile | sed -e 's/_I.root/.fits.fz/'`
+if [ -e $rawfile ]
+then 
+   printprocesslog "INFO processing "$rawfile
+   tstopi=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
+   tstopf=`$factpath/fitsdump -h $rawfile  2>/dev/null | grep TSTOPF | grep -E -o '[.][0-9]+'`
+   tstop=${tstopi}${tstopf}
+else
+   printprocesslog "WARN "$rawfile" not found."
+fi
+if [ "$tstop" == "" ]
+then
+   printprocesslog "WARN tstop empty "$rawfile
+fi
+if ! ls $drivefile >/dev/null 2>&1
+then 
+   printprocesslog "WARN "$drivefile" missing."
+fi
+if  ls $drivefile >/dev/null 2>&1  && ! [ "$tstop" == "" ]
+then
+   printprocesslog "INFO getting coordinates from "$drivefile
+   coordinates=( `${factpath}/fitsdump ${drivefile} -c Ra_src Dec_src -r --filter='Time<'${tstop}  2>/dev/null | tail -1 2>&1` )
+   if [ "${coordinates[0]}" == "" ] || [ "${coordinates[1]}" == "" ]
+   then
+      printprocesslog "WARN couldn't get coordinates ("${coordinates[@]}") from "$drivefile
+      echo "WARN couldn't get coordinates ("${coordinates[@]}") from "$drivefile
+      finish
+   fi
+   if [ "${coordinates[0]}" == "0" ] || [ "${coordinates[1]}" == "0" ]
+   then
+      printprocesslog "WARN coordinates "${coordinates[@]}
+      echo "WARN coordinates "${coordinates[@]}
+      finish
+   fi
+else
+   night=`echo $date | sed -e 's/\///g'`
+   runid=`basename $starfile | cut -d_ -f2`
+   printprocesslog "INFO getting coordinates from DB."
+   query="SELECT fSourceKEY FROM RunInfo WHERE fNight="$night" AND fRunID="$runid
+   sourcekey=`sendquery`
+   if [ "$sourcekey" == "" ]
+   then
+      printprocesslog "WARN sourcekey empty for "$night"_"$runid" - coordinates"${coordinates[@]}
+      finish
+   fi
+   query="SELECT Round(fRightAscension,6), Round(fDeclination,6) from Source WHERE fSourceKey="$sourcekey
+   coordinates=( `sendquery` )
+   if [ "${coordinates[0]}" == "" ] || [ "${coordinates[1]}" == "" ]
+   then
+      printprocesslog "WARN couldn't get coordinates ("${coordinates[@]}") for "$night"_"$runid" from DB."
+      finish
+   fi
+   if [ "${coordinates[0]}" == "0" ] || [ "${coordinates[1]}" == "0" ]
+   then
+      printprocesslog "WARN coordinates "${coordinates[@]}" for "$night"_"$runid
+      finish
+   fi
+fi
+ra=${coordinates[0]}
+dec=${coordinates[1]}
+
+
+outpath=`dirname $starfile | sed -e 's/star/ganymed_run/'`
+makedir $outpath
+
+output=$outpath/`basename $starfile | cut -d_ -f1-2`
+logfile=$output"-ganymed.log"
+ganymedfile=$output"-analysis.root"
+
+cd $mars
+
+# run ganymed 
+printprocesslog "INFO starting ganymed.C for starfile "$starfile
+
+printprocesslog "DEBUG root -q -b fact/analysis/ganymed.C\($ra\,$dec\,"\"$starfile\""\,"\"$output\""\) | tee $logfile | intgrep"
+check1=`root -q -b fact/analysis/ganymed.C\($ra\,$dec\,"\"$starfile\""\,"\"$output\""\) | tee $logfile | intgrep`
+
+case $check1 in
+   0)   printprocesslog "INFO ganymed.C was successful for starfile "$starfile" ra "$ra" dec "$dec" (check1=$check1)"
+        
+        echo "run numevts.C for "$ganymedfile" for table "$resulttable1 >> $logfile 2>&1
+        printprocesslog "INFO run numevts.C for "$ganyemdfile" for table "$resulttable1
+        printprocesslog "DEBUG root -q -b -l fact/processing/numevents.C+\("\"$ganymedfile"\"\,"\"$starfile"\"\,"\"$resulttable1"\"\,kFALSE\)"
+        check1=`root -q -b -l fact/processing/numevents.C+\("\"$ganymedfile"\"\,"\"$starfile"\"\,"\"$resulttable1"\"\,kFALSE\) | tee $logfile | intgrep`
+        case $check1 in
+           1)   printprocesslog "INFO filling numevts.C was successfully for "$ganymedfile" and table "$resulttable1" (check1=$check1)"
+                ;;
+           0)   printprocesslog "WARN connection to DB failed in numevts.C (check1=$check1)"
+                ;;
+           *)   printprocesslog "ERROR numevts.C failed for "$ganymedfile" and table "$resulttable1" (check1=$check1)"
+                ;;
+        esac
+        ;;
+   *)   printprocesslog "ERROR ganymed.C failed for starfile "$starfile" ra "$ra" dec "$dec" (check1=$check1)"
+        ;;
+esac
+
+finish 
+
Index: branches/trigger_burst_research/QuickLook/RunStar.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/RunStar.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/RunStar.sh	(revision 18288)
@@ -0,0 +1,48 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+logfile=$1
+
+cd $mars
+
+# run star 
+printprocesslog "INFO starting star_file.C for file "$2
+
+printprocesslog "DEBUG root -q -b fact/analysis/star_file.C\("\"$2\""\,5.2\,3.3\,"\"$3\""\) | tee $logfile | intgrep"
+check1=`root -q -b fact/analysis/star_file.C\("\"$2\""\,5.2\,3.3\,"\"$3\""\) | tee $logfile | intgrep`
+
+case $check1 in
+   0)   printprocesslog "INFO star was successful for file "$2" (check1=$check1)"
+        # run merpp
+        printprocesslog "INFO starting merpp.C for file "$4
+        logfile=`echo $logfile | sed -e 's/images/merpp/g'`
+        
+        printprocesslog "DEBUG root -q -b fact/analysis/merpp_file.C\("\"$4\""\) | tee $logfile | intgrep"
+        check2=`root -q -b fact/analysis/merpp_file.C\("\"$4\""\) | tee $logfile | intgrep`
+        
+        case $check2 in
+           0)   printprocesslog "INFO merpp was successful for file "$4" (check2=$check2)"
+                `dirname $0`/RunGanymed.sh $4
+                ;;
+           *)   printprocesslog "ERROR merpp_file.C failed for file "$4" -> check log "$logfile"  (check2=$check2)"
+                # delete image file in this case to be sure that ganymed doesn't try to process it
+                if ls $4 >/dev/null 2>&1
+                then
+                   rm -v $4
+                fi
+                ;;
+        esac
+        ;;
+   *)   printprocesslog "ERROR star_file.C failed for file "$2" -> check log "$1" (check1=$check1)"
+        if ls $4 >/dev/null 2>&1
+        then
+           rm -v $4
+        fi
+        ;;
+esac
+
+finish 
+
Index: branches/trigger_burst_research/QuickLook/Step1.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/Step1.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/Step1.sh	(revision 18288)
@@ -0,0 +1,390 @@
+#!/bin/bash
+#
+
+# remarks: 
+# rsync-server still used
+# move fileerror check to main-loop?
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# get date (before 18h there is no new data to be processed)
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   datepath=$certaindate
+else
+   datepath=`date --date="-19HOUR" +%Y/%m/%d`
+fi
+date=`echo $datepath | sed -e 's/\///g'`
+printprocesslog "INFO processing "$datepath
+
+auxpathnewdaq=/newdaq/aux/$datepath
+# create aux directory on daq, if not yet there
+auxpath=/loc_data/aux/$datepath
+makedir $auxpath >/dev/null
+# create path for info files needed for analysis
+infopath=$anapath/info/$datepath
+makedir $infopath >/dev/null
+echo "" > $infopath/runrow.txt
+# create path for callisto output
+calpath=$anapath/callisto/$datepath
+makedir $calpath >/dev/null
+rawpathnewdaq=/newdaq/raw/$datepath
+rawpath=/loc_data/raw/$datepath
+
+# needed auxiliary files: 
+#   drive file with information about current source position
+drivefile=$auxpath/${date}.DRIVE_CONTROL_SOURCE_POSITION.fits
+drivefilenewdaq=$auxpathnewdaq/${date}.DRIVE_CONTROL_SOURCE_POSITION.fits
+#   drive file with information about tracking position
+drivefile2=$auxpath/${date}.DRIVE_CONTROL_TRACKING_POSITION.fits
+drivefilenewdaq2=$auxpathnewdaq/${date}.DRIVE_CONTROL_TRACKING_POSITION.fits
+#   file with magic weather information 
+mweatherfile=$auxpath/${date}.MAGIC_WEATHER_DATA.fits
+mweatherfilenewdaq=$auxpathnewdaq/${date}.MAGIC_WEATHER_DATA.fits
+#   file with trigger rates
+ratesfile=$auxpath/${date}.FTM_CONTROL_TRIGGER_RATES.fits
+ratesfilenewdaq=$auxpathnewdaq/${date}.FTM_CONTROL_TRIGGER_RATES.fits
+#   file with trigger rates
+tempfile=$auxpath/${date}.FSC_CONTROL_TEMPERATURE.fits
+tempfilenewdaq=$auxpathnewdaq/${date}.FSC_CONTROL_TEMPERATURE.fits
+#   file with trigger rates
+humfile=$auxpath/${date}.FSC_CONTROL_HUMIDITY.fits
+humfilenewdaq=$auxpathnewdaq/${date}.FSC_CONTROL_HUMIDITY.fits
+
+function rsync_aux_file()
+{
+   if ls $1 >/dev/null 2>&1
+   then 
+      printprocesslog "INFO rsync "$1
+      # rsync 
+      #   from newdaq (/newdaq = /fact on newdaq), rsync server newdaq::newdaq/
+      #   to daq (/daq = /loc_data on daq)
+      rsyncservernewdaq=`echo $1 | sed -e 's/^\//172.16.100.100::/'` 
+      # old
+      #if ! rsync -a -T $rsynctempdir $1 $2
+      # new (workaround for problems on daq)
+      if ! rsync -a -T $rsynctempdir $rsyncservernewdaq $2
+      then 
+         printprocesslog "WARN rsync of "$1" failed."
+      fi
+   else
+      printprocesslog "WARN "$1" missing."
+   fi
+}
+
+function check_daq()
+{
+   diskusage=( `df -P /raid10 | grep raid10 ` )
+   # check if more than 700 GB are left on /loc_data
+   if [ ${diskusage[3]} -lt $disklimitdaq ] 
+   then
+      echo "WARN less than 700 GB left on /raid10 on node "$HOSTNAME
+      printprocesslog "WARN less than 700 GB left on /raid10 on node "$HOSTNAME
+      df -h /raid10
+      finish
+   fi
+}
+
+check_daq
+
+printprocesslog "INFO get lists of raw files on newdaq and daq"
+files=( `find $rawpathnewdaq -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort` )
+# to treat links use:
+#files=( `find -L $rawpathnewdaq -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]fits[.]?[g]?[f]?[z]?' 2>/dev/null | sort` )
+
+if [ ${#files[@]} -eq 0 ]
+then
+   printprocesslog "INFO no raw files available yet for "$datepath
+   finish
+fi
+fileslocal=( `find -L $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' 2>/dev/null| sort` )
+
+callistofiles=( `find $calpath -type f -name $date*-calibration.log 2>/dev/null| sort` )
+# get number of dataruns from DB
+query="SELECT Count(*) FROM RunInfo WHERE fNight="$date" AND fRunTypeKey=1"
+numdataruns=`sendquery`
+query="SELECT Count(*) FROM RunInfo WHERE fNight="$date" AND fRunTypeKey=6"
+numlpruns=`sendquery`
+query="SELECT Count(*) FROM RunInfo WHERE fNight="$date" AND fRunTypeKey=2 AND fHasDrsFile=1 AND fROI=300"
+numpedruns=`sendquery`
+query="SELECT Count(*) FROM RunInfo WHERE fNight="$date" AND fRunTypeKey=5"
+numdrstime=`sendquery`
+numpedruns=0
+#numcalibrated=`echo " $numdataruns + $numlpruns + $numpedruns + $numdrstime " | bc -l`
+numcalibrated=`echo " $numdataruns + $numdrstime " | bc -l`
+
+# create raw directory on daq, if not yet there
+makedir $rawpath >/dev/null
+
+#echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+printprocesslog "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+
+while [ ${#fileslocal[@]} -ne ${#files[@]} ] || [ $numcalibrated -ne ${#callistofiles[@]} ] # || [ $numcalibrated -ne 0 ] # FIXME: Logik ueberdenken u ueberarb
+do
+   # only continue with script 
+   #  when there is more than 10% space on daq
+   source `dirname $0`/../Sourcefile.sh
+   check_daq
+   
+   numcalibrated=0
+   #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+   printprocesslog "INFO status beginning of while-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+
+   rsync_aux_file $drivefilenewdaq $drivefile
+   
+   # files on newdaq
+   for file in ${files[@]}
+   do
+      printprocesslog "processing "$file
+      #echo "processing "$file
+      localfile=`echo $file | sed -e 's/newdaq/loc_data/'`
+
+      source `dirname $0`/../Sourcefile.sh
+
+      # check if file is already transferred
+      if ! ls $localfile >/dev/null 2>&1 
+      then
+         # check if it is drs-file
+         #   get stop time from raw-file
+         if [ "`echo $file | grep -o drs`" == "drs" ]
+         then
+            nondrs=`basename $file | sed -e 's/[.]drs//g'`
+            nondrsfile=`find -L $rawpath -name $nondrs.*z`
+            tstop=`$factpath/fitsdump -h $nondrsfile  2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
+         else
+            tstop=`$factpath/fitsdump -h $file  2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
+         fi
+         # when stop time is 0, file is not closed
+         # when an error is returned the tstop is empty
+         if [ "$tstop" == "0" ] || [ "$tstop" == "" ]
+         then
+            printprocesslog "WARN "$file" not yet closed."
+            # if a file is not closed and not touched for 30 minutes, 
+            #    it is assumed corrupted and still transferred
+            fileaccessed=`find $file -amin -30`
+            if ! [ "$fileaccessed" == "" ]
+            then
+               printprocesslog "INFO "$file" was accessed in the last 30 minutes => continue"
+               continue
+            else
+               printprocesslog "WARN: "$file" has empty TSTOP but was not touched for 30 minutes"
+               fileerror="yes"
+            fi
+         fi
+
+         # rsync 
+         #   from newdaq (/newdaq = /fact on newdaq), rsync server newdaq::newdaq/
+         #   to daq (/daq = /loc_data on daq)
+         # to access rsync server via the dedicated network between 
+         #    daq and newdaq, use 172.16.100.100::newdaq
+         filersyncserver=`echo $file | sed -e 's/^\//172.16.100.100::/'` 
+         # old
+         ##if ! rsync -av --stats --progress --bwlimit=$bwlimit $file $localfile
+         #if ! rsync -a -T $rsynctempdir --bwlimit=$bwlimit $file $localfile
+         # new
+         if ! rsync -a -W -T $rsynctempdir --bwlimit=$bwlimit $filersyncserver $localfile
+         then
+            printprocesslog "ERROR something went wrong with rsync of "$file
+            rm $localfile
+            continue
+         fi
+         printprocesslog "INFO "$file" rsynced successfully."
+      fi
+
+      # for .drs.fits files no further treatment needed
+      if [ "`echo $localfile | grep -o drs`" == "drs" ]
+      then
+         continue
+      fi
+      
+#      # temporary check
+#      if [ "`echo $file | grep -o drs`" == "drs" ]
+#      then
+#         nondrs=`basename $file | sed -e 's/[.]drs//g'`
+#         nondrsfile=`find -L $rawpath -name $nondrs.*z`
+#         tstop=`$factpath/fitsdump -h $nondrsfile  2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
+#      else
+#         tstop=`$factpath/fitsdump -h $file  2>/dev/null | grep TSTOPI | grep -E -o '[0-9]+'`
+#      fi
+#      # when stop time is 0, file is not closed
+#      # when an error is returned the tstop is empty
+#      if [ "$tstop" == "0" ] || [ "$tstop" == "" ]
+#      then
+#         printprocesslog "WARN "$file" not yet closed. -> continue (temporary check)"
+#         continue
+#      fi
+#      # end temporary check
+      
+      # treat other files (.fits.fz)
+      runtype=`$factpath/fitsdump -h $localfile  2>/dev/null | grep RUNTYPE | grep -E -o "['][a-z0-9._-]+[']" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
+      if [ "$runtype" != "data" ]
+      then
+         # skip a non-data run when it has not 1000 evts 
+         #   as this means probably an fad-loss 
+         #   and these runs are repeated in that case
+         numevts=`$factpath/fitsdump -h $file  2>/dev/null | grep Events | grep -E -o '[0-9]+'`
+         if [ "$numevts" == "" ]
+         then
+            printprocesslog "WARN could not get number of events from file "$file" -> continue "
+            #echo "WARN could not get number of events from file "$file" -> continue "
+            continue
+         fi
+         if [ $numevts -ne 1000 ]
+         then
+            printprocesslog "INFO file "$file" is a non-data file ("$runtype") and has not 1000 events ("$numevts")"
+            continue
+         fi
+      fi
+
+      # get run number
+      runnum=`echo $localfile | cut -d_ -f3 | cut -d. -f1`
+      
+      # what is needed to process the different runs? 
+      #   P: run#(P), run#(drs-file)
+      #   C: run#(C), run#(drs-file), run#(drs-time)
+      #   D: run#(D), run#(drs-file), run#(drs-time), ? 
+      # what is drs-file? pedestal, roi300, has drs.fits
+      callistolog=$calpath"/"$date"_"$runnum"-calibration.log"
+      case $runtype in
+         data) # treat D-runs
+            if [ "$fileerror" = "yes" ]
+            then
+               printprocesslog "INFO do not further process corrupted file "$localfile
+               fileerror=
+               continue
+            fi
+            
+            # some accounting
+            printprocesslog "DEBUG counting callisto logs and set data files +1."
+            # get number of callisto logs
+            runcallistocount=`ps aux | grep RunCallisto | grep -E -o '20[12][0-9][01][0-9][0-3][0-9]_[0-9][0-9][0-9]' | sort | uniq | wc -l`
+            # count runs to be calibrated
+            numcalibrated=`echo " $numcalibrated + 1 " | bc -l`
+            printprocesslog "DEBUG running callistos: "$runcallistocount" #runs: "$numcalibrated" #callisto-logs: "${#callistofiles[@]}
+            
+            # do not overload system in case of a lot of files to be processed
+            #  numruncallistos is set in setup.fact.lp.data
+            if [ $runcallistocount -ge $numruncallistos ]
+            then
+               printprocesslog "INFO "$runcallistocount" RunCallisto.sh are running -> continue"
+               continue
+            fi
+            
+            # starting calibration
+            if ! [ -e $callistolog ]
+            then
+               rsync_aux_file $drivefilenewdaq2 $drivefile2
+               rsync_aux_file $mweatherfilenewdaq $mweatherfile
+               rsync_aux_file $ratesfilenewdaq $ratesfile
+               rsync_aux_file $tempfilenewdaq $tempfile
+               rsync_aux_file $humfilenewdaq $humfile
+               
+               calfile=$calpath"/"$date"_"$runnum"_C.root"
+               printprocesslog "INFO starting RunCallisto.sh for drun "$localfile" logfile "$callistolog" drs-calib "$drscalib" outpath "$outpath" calfile "$calfile
+               `dirname $0`/RunCallisto.sh "drun" $callistolog $localfile $drscalib $calpath $calfile &
+            fi
+            continue
+            ;;
+         pedestal) # treat P-runs
+            roi=`$factpath/fitsdump -h $localfile  2>/dev/null | grep ROI | grep -v ROITM | grep -E -o "[0-9][0-9][0-9][0-9]?" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
+            if [ $roi -eq 300 ]
+            then
+               # check drs-file
+               drsfile=`echo $localfile | sed -e 's/[.]fits[.]fz/.drs.fits/g'`
+               if [ -e $drsfile ]
+               then
+                  # set name of drs-file
+                  drscalib=$drsfile
+                  continue
+               #else
+                  # not needed for QLA
+                  #numcalibrated=`echo " $numcalibrated + 1 " | bc -l`
+                  #if ! [ -e $callistolog ]
+                  #then
+                  #   pedfile=$calpath"/"$date"_"$runnum"-pedestal.root"
+                  #   # count runs to be calibrated
+                  #   printprocesslog "INFO starting RunCallisto.sh for prun "$localfile" logfile "$callistolog" drs-calib "$drscalib" pedfile "$pedfile
+                  #   echo "INFO starting RunCallisto.sh for prun "$localfile" logfile "$callistolog" drs-calib "$drscalib" pedfile "$pedfile
+                  #  `dirname $0`/RunCallisto.sh "prun" $callistolog $localfile $drscalib $pedfile &
+                  #fi
+               fi
+            fi
+            ;;
+         light-pulser-ext) # treat C-runs
+            # do lp-treatment -> not needed for QLA
+            #lpfile=$calpath"/"$date"_"$runnum"-lightpulser.root"
+            #numcalibrated=`echo " $numcalibrated + 1 " | bc -l`
+            #if ! [ -e $callistolog ]
+            #then
+            #   if [ -e $drstime ]
+            #   then
+            #      # count runs to be calibrated
+            #      printprocesslog "INFO starting RunCallisto.sh for crun "$localfile" logfile "$callistolog" drs-calib "$drscalib" drs-time "$drstime" lpfile "$lpfile
+            #      echo "INFO starting RunCallisto.sh for crun "$localfile" logfile "$callistolog" drs-calib "$drscalib" drs-time "$drstime" lpfile "$lpfile
+            #      `dirname $0`/RunCallistoNew.sh "crun" $callistolog $localfile $drscalib $drstime $lpfile &
+            #   fi
+            #fi
+            ;;
+         drs-time) # treat drs-time runs
+            # do drs-timing calibration
+            drstime=$calpath"/"$date"_"$runnum"-drstime.root"
+            # starting calibration
+            numcalibrated=`echo " $numcalibrated + 1 " | bc -l`
+            if ! [ -e $callistolog ]
+            then
+               # count runs to be calibrated
+               printprocesslog "INFO starting RunCallisto.sh for time "$localfile" logfile "$callistolog" drs-ped "$drsped" drstime "$drstime
+               #echo "INFO starting RunCallisto.sh for time "$localfile" logfile "$callistolog" drs-ped "$drsped" drstime "$drstime
+               `dirname $0`/RunCallisto.sh "time" $callistolog $localfile $drsped $drstime &
+            fi
+            ;;
+         drs-pedestal) # get drs-pedestal
+            roi=`$factpath/fitsdump -h $localfile  2>/dev/null | grep ROI | grep -v ROITM | grep -E -o "[0-9][0-9][0-9][0-9]?" | sed -e "s/'//g" -e "s/_/-/g" -e "s/[.]//g"`
+            drs=`$factpath/fitsdump -h $localfile  2>/dev/null | grep DRSCALIB | grep -E -o " T " `
+            if [ $roi -eq 1024 ] && [ "$drs" == " T " ]
+            then
+               drsped=`echo $localfile | sed -e 's/[.]fits[.]fz/.drs.fits/g'`
+            fi
+            ;;
+         *) # other runs
+            printprocesslog "INFO file "$file" has runtype "$runtype" -> continue "
+            continue
+            ;;
+      esac
+   done
+   printprocesslog "INFO status after loop: "$runcallistocount" callistos running, "$numcalibrated" data runs to process in total, "${#callistofiles[@]}" have already a callisto-logfile "
+   
+   # get new file lists
+   printprocesslog "INFO get new file lists for "$datepath
+   files=( `find $rawpathnewdaq -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' | sort` )
+   fileslocal=( `find -L $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' | sort` )
+   callistofiles=( `find $calpath -type f -name $date*-calibration.log | sort` )
+   #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+   printprocesslog "INFO status after for-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+
+   # wait and get new file lists
+   update=
+   if [ ${#fileslocal[@]} -eq ${#files[@]} ] 
+   then 
+      printprocesslog "INFO wait 60 seconds."
+      sleep 60
+      printprocesslog "INFO get new file lists for "$datepath
+      files=( `find $rawpathnewdaq -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' | sort` )
+      fileslocal=( `find -L $rawpath -type f -regex '.*20[0-9][0-9][01][0-9][0-3][0-9][_][0-9][0-9][0-9][.]d?r?s?[.]?fits[.]?[g]?[f]?[z]?' | sort` )
+      callistofiles=( `find $calpath -type f -name $date*-calibration.log | sort` )
+   fi
+   #echo "INFO #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+   printprocesslog "INFO status after wait end of while-loop #files-daq:"${#fileslocal[@]}" #files-newdaq:"${#files[@]}" #callisto-logs:"${#callistofiles[@]}" #runs:"$numcalibrated
+   sleep 30
+   printprocesslog "INFO sleep 30"
+done
+
+
+
Index: branches/trigger_burst_research/QuickLook/Step2a.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/Step2a.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/Step2a.sh	(revision 18288)
@@ -0,0 +1,89 @@
+#!/bin/bash
+#
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   if [ "$1" = "" ]
+   then 
+      # get all night
+      #getdates "all"
+      # get last 6 nights if hour between 7 and 19h, else only current night
+      getdates 6 7 19
+   else
+      getdates $1
+   fi
+fi
+
+
+
+for date in ${dates[@]}
+do 
+   printprocesslog "INFO processing "$date
+   #echo "INFO processing "$date
+   night=`echo $date | sed -e 's/\///g'`
+   
+   numganymeds=0
+   # getting all image files for this night
+   printprocesslog "INFO get file list for night "$date
+   files=( `find $anapath/star/$date -type f -name '*_I.root' 2>/dev/null | sort` )
+   if [ ${#files[@]} -eq 0 ]
+   then
+      printprocesslog "INFO no image files available for night "$date
+      continue
+   fi
+
+   for file in ${files[@]}
+   do
+      night=`basename $file | cut -d_ -f1`
+      run=`basename $file | cut -d_ -f2`
+      printprocesslog "INFO processing run "$run
+      # process only if merpp-log is available to make sure that star-file is complete
+      # at ISDC seq# != run# -> get seq#
+      if echo $resulttable1 | grep ISDC >/dev/null 2>&1
+      then 
+         query="SELECT LPAD(fSequenceID, 3, 0) FROM RunInfo WHERE fNight="$night" AND fRunID="$run
+         seq=`sendquery`
+         merpplog=`dirname $file`"/"$night"_"$seq"-merpp.log"
+      else 
+         merpplog=`echo $file | sed -e 's/_I.root/-merpp.log/'`
+      fi
+      # check if merpp is finished
+      stillrunning=`find $merpplog -mmin -1 2>/dev/null`
+      if ! [ -e $merpplog ] || [ "$stillrunning" != "" ]
+      then
+         printprocesslog "INFO merpp still running - wait with file "$file
+         continue
+      fi
+      # process only if ganymedlogfiles is not yet there
+      logfile=`echo $file | sed -e 's/star/ganymed_run/' -e 's/_I[.]root/-ganymed[.]log/'`
+      if ! ls $logfile >/dev/null 2>&1
+      then
+         printprocesslog "DEBUG `dirname $0`/RunGanymed.sh "$file 
+         if [ "$qlasge" = "yes" ]
+         then
+            printprocesslog "$sgepath/qsub -b y -q fact_short -v AUTOMATIONSETUP=$AUTOMATIONSETUP -v starfile=$file -e bla.txt -o bla.txt `dirname $0`/RunGanymed.sh"
+            $sgepath/qsub -b y -q fact_short -v AUTOMATIONSETUP=$AUTOMATIONSETUP -v starfile=$file -e bla.txt -o bla.txt `dirname $0`/RunGanymed.sh
+         else
+            printprocesslog " INFO `dirname $0`/RunGanymed.sh $file"
+            `dirname $0`/RunGanymed.sh $file
+            echo "ganymed for "$file
+         fi
+         numganymeds=`echo " $numganymeds + 1 " | bc -l`
+      fi
+   done
+   printprocesslog "INFO found "$numganymeds" ganymeds."
+done
+
+finish
+
Index: branches/trigger_burst_research/QuickLook/Step2b.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/Step2b.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/Step2b.sh	(revision 18288)
@@ -0,0 +1,144 @@
+#!/bin/bash
+#
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# fixme: get sources from DB
+sources=( 1 2 5 7 )
+#sources=( 1 2 5 )
+numganymeds=0
+
+printprocesslog "INFO get all nights."
+query="SELECT fNight FROM RunInfo WHERE not isnull(fSourceKey) and fNight > 20130301 GROUP BY fNight ORDER BY fNight DESC"
+nights=( `sendquery` )
+
+for source in ${sources[@]}
+do 
+   dspath=$anapath/datasets_night/$source
+   makedir $dspath >/dev/null
+done
+
+for night in ${nights[@]}
+do 
+   echo $night
+   for source in ${sources[@]}
+   do 
+      dspath=$anapath/datasets_night/$source
+      dsfile=$dspath"/dataset"$night".txt"
+      if ! ls $dsfile >/dev/null 2>&1
+      then
+         echo "# source "$source" for night "$night > $dsfile
+         linesold=0
+      else
+         lines=( `wc -l $dsfile` )
+         linesold[$source]=${lines[0]}
+      fi
+   done
+
+   inpath=$anapath/star/`echo $night | cut -c 1-4`/`echo $night | cut -c 5-6`/`echo $night | cut -c 7-8`
+   #files=`find $inpath -name '*_I.root'`
+   files=( `ls $inpath/*_I.root 2>/dev/null` )
+   if [ ${#files[@]} -eq 0 ]
+   then
+      continue
+   fi
+   echo "files: "${#files[@]}
+   for file in ${files[@]}
+   do 
+      run=`basename $file | cut -d_ -f2`
+      query="SELECT fSourceKey FROM RunInfo WHERE fNight="$night" AND fRunID="$run" GROUP BY fSourceKey"
+      source=`sendquery`
+      if [ $source -eq 0 ]
+      then
+         continue
+      fi
+      dspath=$anapath/datasets_night/$source
+      dsfile=$dspath"/dataset"$night".txt"
+      if ! grep `basename $file` $dsfile >/dev/null 2>&1
+      then 
+         if echo $resulttable1 | grep ISDC >/dev/null 2>&1
+         then 
+            query="SELECT LPAD(fSequenceID, 3, 0) FROM RunInfo WHERE fNight="$night" AND fRunID="$run
+            seq=`sendquery`
+            merpplog=`dirname $file`"/"$night"_"$seq"-merpp.log"
+         else 
+            merpplog=`echo $file | sed -e 's/_I.root/-merpp.log/'`
+         fi
+         echo $merpplog	
+         stillrunning=`find $merpplog -mmin -1 2>/dev/null`
+         if [ -e $merpplog ] && [ "$stillrunning" == "" ]
+         then
+            echo `dirname $file`" "`basename $file` >> $dsfile
+            echo `dirname $file`" "`basename $file`
+         else
+            printprocesslog "INFO merpp still running - wait with file "$file
+            continue
+         fi
+      fi
+   done
+   
+   for source in ${sources[@]}
+   do 
+      dspath=$anapath/datasets_night/$source
+      dsfile=$dspath"/dataset"$night".txt"
+      if ls $dsfile >/dev/null 2>&1
+      then
+         lines=( `wc -l $dsfile` )
+         linesnew[$source]=${lines[0]}
+         if [ ${lines[0]} -le 1 ]
+         then
+            # not ideal - what if number of files stays the same, but files change?
+            rm -v $dsfile
+            continue
+         fi
+         echo "new: "${linesnew[$source]}
+         echo "old: "${linesold[$source]}
+         if [ ${linesnew[$source]} -le ${linesold[$source]} ]
+         then
+            echo "nothing new"
+            continue
+         else
+            ls $dsfile
+         fi
+      fi
+      query="SELECT Round(fRightAscension,6), Round(fDeclination,6) from source WHERE fSourceKey="$source
+      coordinates=( `sendquery` )
+      if [ "${coordinates[0]}" == "" ] || [ "${coordinates[1]}" == "" ]
+      then
+         printprocesslog "WARN couldn't get coordinates ("${coordinates[@]}") from DB for source "$source
+         #echo "WARN couldn't get coordinates ("${coordinates[@]}") from DB for source "$source
+         continue
+      fi
+      if [ "${coordinates[0]}" == "0" ] || [ "${coordinates[1]}" == "0" ]
+      then
+         printprocesslog "WARN coordinates "${coordinates[@]}
+         #echo "WARN coordinates "${coordinates[@]}
+         continue
+      fi
+      if ls $dsfile >/dev/null 2>&1
+      then
+         outpath=$anapath/ganymeds_night/$source
+         logfile=$outpath"/ganymed"$night".log"
+         echo $logfile
+         #ls $logfile
+         if ! ls $logfile >/dev/null 2>&1
+         then
+            ls $dsfile
+            if [ "$qlasge" = "yes" ]
+            then
+               printprocesslog "$sgepath/qsub -b y -q fact_short -v AUTOMATIONSETUP=$AUTOMATIONSETUP -v dsfile=$dsfile -v ra=${coordinates[0]} -v dec=${coordinates[1]} -e bla.txt -o bla.txt `dirname $0`/RunGanymed.sh"
+               $sgepath/qsub -b y -q fact_short -v AUTOMATIONSETUP=$AUTOMATIONSETUP -v dsfile=$dsfile -v ra=${coordinates[0]} -v dec=${coordinates[1]} -e bla.txt -o bla.txt `dirname $0`/RunGanymed.sh
+            else
+               `dirname $0`/RunGanymed.sh $dsfile ${coordinates[0]} ${coordinates[1]}
+            fi
+            #echo "`dirname $0`/RunGanymed.sh $source $dsfile"
+            #`dirname $0`/RunGanymed.sh $source $dsfile
+            numganymeds=`echo " $numganymeds + 1 " | bc -l`
+         fi
+      fi
+   done
+
+   echo "found "$numganymeds" ganymeds."
+done
+
+finish
Index: branches/trigger_burst_research/QuickLook/Step3.sh
===================================================================
--- branches/trigger_burst_research/QuickLook/Step3.sh	(revision 18288)
+++ branches/trigger_burst_research/QuickLook/Step3.sh	(revision 18288)
@@ -0,0 +1,266 @@
+#!/bin/bash
+#
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0 $1"
+
+export LD_LIBRARY_PATH=./:$LD_LIBRARY_PATH
+
+cd $mars
+
+if ! [ "$1" = "run" ] && ! [ "$1" = "night" ]
+then 
+   #echo "wrong option "$1" (only run and night valid)."
+   echo "wrong option "$1" (only run and night valid)."
+   finish
+fi
+
+if [ "$1" = "run" ]
+then 
+   tablename=$resulttable1
+   timebins=( 5 10 20 30 40 60 90 120 180 240 300 360 )
+   timename="min"
+fi
+
+if [ "$1" = "night" ]
+then
+   tablename=$resulttable2
+   timebins=( 1 2 3 4 5 6 7 8 9 10 )
+   timename="night"
+fi
+
+# possibility to give table name as second option
+if [ "$2" != "" ]
+then 
+   tablename=$2
+else
+   printprocesslog "INFO no second option given -> using table resulttable1 or resulttable2 for producing plots. "
+fi
+
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 6 nights if hour between 7 and 19h, else only current night
+   getdates 6 7 19
+fi
+
+#echo ${dates[@]}
+for date in ${dates[@]}
+do 
+   date2=`echo $date | sed -e 's/\///g'`
+   if [ $date2 -le $firstnight ]
+   then
+      continue
+   fi
+   query="SELECT fSourceKEY FROM RunInfo WHERE fNight="$date2" AND fSourceKey > 0 AND fRunTypeKEY=1 GROUP BY fSourceKey "
+   sources=( `sendquery` )
+   printprocesslog "INFO process "$date" -> sources: "${sources[@]}
+   for source in ${sources[@]}
+   do 
+      #echo $source
+      printprocesslog "INFO processing files for source "$source
+      outpath=$anapath/lightcurves/$date
+      makedir $outpath >/dev/null
+
+      for timebin in ${timebins[@]}
+      do 
+         # doing the plots for one night
+         logfile=$outpath"/lightcurve"$source"_"$timebin$timename"_"$date2".log"
+         outfile=$outpath"/lightcurve"$source"_"$timebin$timename"_"$date2".root"
+         #echo $outfile
+         printprocesslog "INFO starting lightcurve.C+ for source "$source" timebin "$timebin" "$timename" night "$date2
+      
+	 check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$date2\\,$date2\,$timebin\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+      
+         case $check1 in
+            0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2" (check1=$check1)"
+                 if ! ./showplot -b --save-as-png $outfile >/dev/null
+                 then
+                    printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+                 fi
+                 ;;
+            1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2", but nothing to plot (check1=$check1)"
+                 ;;
+            *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin "$timebin" "$timename" night "$date2" (check1=$check1)"
+                 ;;
+         esac
+
+         
+         # doing the plots for last week 
+         lastweek=`date +%Y%m%d --date="$date -1 week"`
+         logfile=$outpath"/lightcurve"$source"_"$timebin$timename"_week.log"
+         outfile=$outpath"/lightcurve"$source"_"$timebin$timename"_week.root"
+         #echo $outfile
+         printprocesslog "INFO starting lightcurve.C+ for source "$source" timebin "$timebin" "$timename" night "$date2" last week ("$lastweek")"
+      
+	 check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$lastweek\\,$date2\,$timebin\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+      
+         case $check1 in
+            0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2" last week (check1=$check1)"
+                 #./showplot -b --save-as-png $outfile
+                 if ! ./showplot -b --save-as-png $outfile >/dev/null
+                 then
+                    printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+                 fi
+                 ;;
+            1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2" last week, but nothing to plot (check1=$check1)"
+                 ;;
+            *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin "$timebin" "$timename" night "$date2" last week (check1=$check1)"
+                 ;;
+         esac
+
+         # doing the plots for last month
+         lastmonth=`date +%Y%m%d --date="$date -28 day"`
+         logfile=$outpath"/lightcurve"$source"_"$timebin$timename"_month.log"
+         outfile=$outpath"/lightcurve"$source"_"$timebin$timename"_month.root"
+         #echo $outfile
+         printprocesslog "INFO starting lightcurve.C+ for source "$source" timebin "$timebin" "$timename" night "$date2" last month ("$lastmonth")"
+      
+	 check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$lastmonth\\,$date2\,$timebin\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+      
+         case $check1 in
+            0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2" last month (check1=$check1)"
+                 #./showplot -b --save-as-png $outfile
+                 if ! ./showplot -b --save-as-png $outfile >/dev/null
+                 then
+                    printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+                 fi
+                 ;;
+            1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2" last month, but nothing to plot (check1=$check1)"
+                 ;;
+            *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin "$timebin" "$timename" night "$date2" last month (check1=$check1)"
+                 ;;
+         esac
+
+         # doing the plots for all nights
+         logfile=$outpath"/lightcurve"$source"_"$timebin$timename"_all.log"
+         outfile=$outpath"/lightcurve"$source"_"$timebin$timename"_all.root"
+         #echo $outfile
+         printprocesslog "INFO starting lightcurve.C+ for source "$source" timebin "$timebin" "$timename" night "$date2" all nights"
+      
+	 check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$firstnight\\,$date2\,$timebin\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+      
+         case $check1 in
+            0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2" all nights (check1=$check1)"
+                 #./showplot -b --save-as-png $outfile
+                 if ! ./showplot -b --save-as-png $outfile >/dev/null
+                 then
+                    printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+                 fi
+                 ;;
+            1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin "$timebin" "$timename" night "$date2" all nights, but nothing to plot (check1=$check1)"
+                 ;;
+            *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin "$timebin" "$timename" night "$date2" all nights (check1=$check1)"
+                 ;;
+         esac
+      done
+      
+      
+      # doing nightly plots
+      
+      # doing the plots for one night
+      logfile=$outpath"/lightcurve"$source"_1night_"$date2".log"
+      outfile=$outpath"/lightcurve"$source"_1night_"$date2".root"
+      #echo $outfile
+      printprocesslog "INFO starting lightcurve.C+ for source "$source" 1 night for night "$date2
+   
+      check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$date2\\,$date2\,-1\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+   
+      case $check1 in
+         0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2" (check1=$check1)"
+              if ! ./showplot -b --save-as-png $outfile >/dev/null
+              then
+                 printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+              fi
+              ;;
+         1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2", but nothing to plot (check1=$check1)"
+              ;;
+         *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin 1 night for night "$date2" (check1=$check1)"
+              ;;
+      esac
+
+      
+      # doing the plots for last week 
+      lastweek=`date +%Y%m%d --date="$date -1 week"`
+      logfile=$outpath"/lightcurve"$source"_1night_week.log"
+      outfile=$outpath"/lightcurve"$source"_1night_week.root"
+      #echo $outfile
+      printprocesslog "INFO starting lightcurve.C+ for source "$source" timebin 1 night for night "$date2" last week ("$lastweek")"
+   
+      check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$lastweek\\,$date2\,-1\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+   
+      case $check1 in
+         0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2" last week (check1=$check1)"
+              #./showplot -b --save-as-png $outfile
+              if ! ./showplot -b --save-as-png $outfile >/dev/null
+              then
+                 printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+              fi
+              ;;
+         1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2" last week, but nothing to plot (check1=$check1)"
+              ;;
+         *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin 1 night for night "$date2" last week (check1=$check1)"
+              ;;
+      esac
+
+      # doing the plots for last month 
+      lastmonth=`date +%Y%m%d --date="$date -28 day"`
+      logfile=$outpath"/lightcurve"$source"_1night_month.log"
+      outfile=$outpath"/lightcurve"$source"_1night_month.root"
+      #echo $outfile
+      printprocesslog "INFO starting lightcurve.C+ for source "$source" timebin 1 night for night "$date2" last month ("$lastmonth")"
+   
+      check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$lastmonth\\,$date2\,-1\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+   
+      case $check1 in
+         0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2" last month (check1=$check1)"
+              #./showplot -b --save-as-png $outfile
+              if ! ./showplot -b --save-as-png $outfile >/dev/null
+              then
+                 printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+              fi
+              ;;
+         1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2" last month, but nothing to plot (check1=$check1)"
+              ;;
+         *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin 1 night for night "$date2" last month (check1=$check1)"
+              ;;
+      esac
+
+      # doing the plots for all nights
+      logfile=$outpath"/lightcurve"$source"_1night_all.log"
+      outfile=$outpath"/lightcurve"$source"_1night_all.root"
+      #echo $outfile
+      printprocesslog "INFO starting lightcurve.C+ for source "$source" timebin 1 night for night "$date2" all nights"
+   
+      check1=`root -q -b fact/analysis/lightcurve.C+\($source\,$firstnight\\,$date2\,-1\,"\"$tablename"\"\,"\"$outfile\""\,kFALSE\) | tee $logfile | intgrep`
+   
+      case $check1 in
+         0)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2" all nights (check1=$check1)"
+              #./showplot -b --save-as-png $outfile
+              if ! ./showplot -b --save-as-png $outfile >/dev/null
+              then
+                 printprocesslog "WARN showplot -b --save-as-png "$outfile" failed."
+              fi
+              ;;
+         1)   printprocesslog "INFO lightcurve.C was successful for source "$source" timebin 1 night for night "$date2" all nights, but nothing to plot (check1=$check1)"
+              ;;
+         *)   printprocesslog "ERROR lightcurve.C failed for source "$source" timebin 1 night for night "$date2" all nights (check1=$check1)"
+              ;;
+      esac
+      
+   done
+done
+
+
+finish
+
Index: branches/trigger_burst_research/Setup/setup.fact.isdc
===================================================================
--- branches/trigger_burst_research/Setup/setup.fact.isdc	(revision 18288)
+++ branches/trigger_burst_research/Setup/setup.fact.isdc	(revision 18288)
@@ -0,0 +1,109 @@
+#!/bin/bash
+#
+# This is a resource file for the scripts, in which paths, variables 
+# and setups are defined
+#
+# This setup file is for the machines on the FACT cluster at ISDC
+#
+
+# setup to use ftools
+export HEADAS=/swdev_nfs/heasoft-6.11.1/x86_64-unknown-linux-gnu-libc2.12 
+export HEADASPROMPT=/dev/null
+
+# setup to use ROOT
+root=/swdev_nfs/root_v5.32.00/bin/thisroot.sh
+source $root
+
+# software versions
+export factpath=/swdev_nfs/FACT++
+export mars=~/Mars.svn.r18249
+
+# site
+processingsite=isdc
+sitekey=4 
+storagesite=isdc
+
+# queuing system
+queuesys=sge
+sgepath=/usr/bin
+
+# logging and setup
+logpath=~/logs.automatic.processing/autologs
+lockpath=~/logs.automatic.processing/locks
+rsynctempdir=/scratch/rsync_tmp
+
+# data paths 
+#datapath=/scratch/fact/data.2013.05.11
+datapath=/gpfs0/fact/processing/data.r18249
+datapath_for_sed=$(printf "%s\n" "$datapath" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+drstimepath=/gpfs0/fact/processing/drs_time_calib
+seqpath=/scratch/fact/sequences
+auxdata=/fact/aux
+auxdata_for_sed=$(printf "%s\n" "$auxdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+rawdata=/fact/raw
+rawdata_for_sed=$(printf "%s\n" "$rawdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+# rawdata and ziprawdata are the same
+#  variable $ziprawdata needed for FillAuxData.sh
+ziprawdata=/fact/raw
+ziprawdata_for_sed=$(printf "%s\n" "$rawdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+
+# get paths for mars, macros and scripts
+#   in case the variable is not yet set
+if [ "$mars" == "" ]
+then 
+  mars=`dirname $0 | sed -e 's/\/datacenter\/scripts//'`
+fi
+macrospath=$mars/datacenter/macros
+scriptspath=`dirname $0`
+
+# rcfiles
+# dependencies of steps
+steps=$scriptspath/../Setup/steps_fact_isdc.rc
+# file with db information
+sqlrc=$mars/sql.rc
+
+
+#addresses to which information about full disks is sent
+deladrs="shift@fact-project.org" 
+#addresses to which the errors are sent
+erradrs="dorner@astro.uni-wuerzburg.de" 
+#addresses to which the changes are sent
+adrs="dorner@astro.uni-wuerzburg.de" 
+
+
+#
+#setup for jobmanager
+#
+sleeptime=300 #600 #30
+sleeptimelimit=360 #360
+errorsleeptimedefault=60 #60
+
+algorithm=2
+
+#hour:             0   1   2   3   4   5   6   7   8   9  10  11  12  13  14  15  16  17  18  19  20  21  22  23
+pnototal=(       180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 )
+pnototalwe=(     180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 180 )
+
+# set variables for jobmanager 
+scripts=( "RunCallisto.sh" "RunStar.sh" ) 
+queues=( "fact_medium" "fact_short" ) 
+scriptscolname=( "Callisto" "Star" ) 
+ratio=( 0.6 0.4 )
+maxjobs=( 150 50 )
+
+# fact_short 1h 192 slots
+# fact_medium 6h 128 slots
+# fact_long 168h 64 slots
+
+# further wishlist: adapt ratio automatically
+
+# setup transfer
+numrsyncwuelimit=3
+
+# setup for qla
+anapath=$datapath
+qlasge="yes"
+resulttable1="AnalysisResultsRunISDC"
+resulttable2="AnalysisResultsNightISDC"
+firstnight=20111115
+
Index: branches/trigger_burst_research/Setup/setup.fact.lp.data
===================================================================
--- branches/trigger_burst_research/Setup/setup.fact.lp.data	(revision 18288)
+++ branches/trigger_burst_research/Setup/setup.fact.lp.data	(revision 18288)
@@ -0,0 +1,95 @@
+#!/bin/bash
+#
+# This is a resource file for the scripts, in which paths, variables 
+# and setups are defined
+#
+# This setup file is for the machine data in La Palma
+#  it is also used for the other machines which share the home of data
+#
+
+# setup to use ftools
+export HEADAS=/opt/heasoft-6.11/x86_64-unknown-linux-gnu-libc2.13-0/
+export HEADASPROMPT=/dev/null
+
+# setup to use ROOT
+root=/opt/root_v5.34.10/bin/thisroot.sh
+source $root
+
+# software versions
+export factpath=/home/fact/operation # path where programs of FACT++ are linked 
+export mars=/home/fact/SW.automatic.processing/Mars.svn.2014.05.26
+
+# site
+processingsite=lp
+sitekey=3
+storagesite=wue
+
+# logging and setup
+logpath=/home/fact/logs.automatic.processing/autologs
+lockpath=/home/fact/logs.automatic.processing/locks
+#setuppath=/magic/simulated/setup
+rsynctempdir=/loc_data/rsync_tmp
+if ! [ -d $rsynctempdir ]
+then
+   mkdir $rsynctempdir
+fi
+
+# data paths
+anapath=/loc_data/analysis # on daq
+#anapath=/newdaq/analysis_bu # temporarily to newdaq
+drstimepath=$anapath/drs_time_calib
+auxdata=/loc_data/aux
+auxdata_for_sed=$(printf "%s\n" "$auxdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+rawdata=/loc_data/raw
+rawdata_for_sed=$(printf "%s\n" "$rawdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+ziprawdata=/loc_data/zipraw
+ziprawdata_for_sed=$(printf "%s\n" "$ziprawdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+
+# get paths for mars, macros and scripts
+#   in case the variable is not yet set
+macrospath=$mars/datacenter/macros
+scriptspath=$mars/datacenter/scripts
+# rcfiles
+# dependencies of steps
+steps=$mars/resources/steps_fact.rc
+# file with db information
+sqlrc=$mars/sql.rc
+
+
+#addresses to which information about full disks is sent
+deladrs="shift@fact-project.org" 
+#addresses to which the errors are sent
+erradrs="dorner@astro.uni-wuerzburg.de" 
+#addresses to which the changes are sent
+adrs="dorner@astro.uni-wuerzburg.de" 
+
+
+#
+# setup QLA
+#
+
+# setup for Step1.sh
+# disklimitdaq: transfer and qla don't start when less than this is available on /daq
+#     with more than 90% disk usage the data taking is affected
+disklimitdaq=800000000 
+# bandwidth limit for transfer from newdaq to daq
+bwlimit=90000 
+# number of RunCallisto.sh started on daq
+#numruncallistos=20 
+numruncallistos=8
+
+# setup RunCallisto.sh
+# number of callisto.C (else RunCallisto.sh is doing sleep) running on daq
+#numcallistos=8
+numcallistos=5
+# seconds waiting before checking again number of callistos
+callistowait=60 
+
+# setup for ZipRawData.sh
+numprocpigz=3 # number of processes used for pigz
+limitpigz="90M" # transfer from daq to data
+
+# setup for FillNumEvts.sh and Step3.sh
+resulttable1="AnalysisResultsRunLP"
+resulttable2="AnalysisResultsNightLP"
+firstnight=20121213
Index: branches/trigger_burst_research/Setup/setup.fact.lp.gate
===================================================================
--- branches/trigger_burst_research/Setup/setup.fact.lp.gate	(revision 18288)
+++ branches/trigger_burst_research/Setup/setup.fact.lp.gate	(revision 18288)
@@ -0,0 +1,22 @@
+#!/bin/bash
+#
+# This is a resource file for the scripts, in which paths, variables 
+# and setups are defined
+#
+# This setup file is for the machine gate in La Palma
+#
+
+# for db backup
+dbnames=( "mysql" "programoptions" "calendar" "systemstatus" "postfix" "horde" "logbook" "factdata" "weather" )
+
+# set only variables which are needed for the scripts running on this machine
+
+# logging and setup
+logpath=/users/fact/logs.automatic.processing/autologs
+runlogpath=/users/fact/logs.automatic.processing/autologs
+
+export mars=/users/fact/fast_setup_for_automatic_analysis/Mars.von.Thomas.2012.06.22
+
+# file with db information
+sqlrc=$mars/sql.rc
+
Index: branches/trigger_burst_research/Setup/setup.fact.wue
===================================================================
--- branches/trigger_burst_research/Setup/setup.fact.wue	(revision 18288)
+++ branches/trigger_burst_research/Setup/setup.fact.wue	(revision 18288)
@@ -0,0 +1,91 @@
+#!/bin/bash
+#
+# This is a resource file for the scripts, in which paths, variables 
+# and setups are defined
+#
+# This setup file is for the machines in Wuerzburg
+#
+
+# setup to use ftools
+export HEADAS=/opt/heasoft-6.12/x86_64-unknown-linux-gnu
+export HEADASPROMPT=/dev/null
+
+# setup to use ROOT
+root=/opt/root_v5.34.06/bin/thisroot.sh
+source $root
+
+# software versions
+export factpath=/opt/FACT++
+export mars=~/opt/Mars
+
+# site
+processingsite=wue
+sitekey=1
+storagesite=wue
+
+# queuing system
+queuesys=sge
+sgepath=/opt/sge/bin/lx-amd64/
+
+# logging and setup
+logpath=/home/fact/logs.automatic.processing/autologs
+lockpath=/home/fact/logs.automatic.processing/locks
+
+# data paths 
+datapath=/fact/data
+drstimepath=$datapath
+seqpath=/fact/sequences
+auxdata=/fact/aux
+rawdata=/fact/raw
+rawdata_for_sed=$(printf "%s\n" "$rawdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
+
+## paths and setup for mc
+mcpath=/magic/simulated
+mcsequpath=$mcpath/sequences
+corsikapath=/opt/MagicSoft/Simulation/Corsika/Mmcs6500
+export FLUPRO=$corsikapath'/fluka2011_2'
+
+# get paths for mars, macros and scripts
+#   in case the variable is not yet set
+if [ "$mars" == "" ]
+then 
+  mars=`dirname $0 | sed -e 's/\/datacenter\/scripts//'`
+fi
+macrospath=$mars/datacenter/macros
+scriptspath=`dirname $0`
+
+# rcfiles
+# dependencies of steps
+steps=$scriptspath/../Setup/steps_fact_isdc.rc
+# file with db information
+sqlrc=$mars/sqlmc.rc
+
+
+#addresses to which the errors are sent
+erradrs="dorner@astro.uni-wuerzburg.de" 
+#addresses to which the changes are sent
+adrs="dorner@astro.uni-wuerzburg.de" 
+
+
+#
+#setup for jobmanager
+#
+sleeptime=150 #600 #30
+sleeptimelimit=360 #360
+errorsleeptimedefault=60 #60
+
+algorithm=2
+
+#hour:            0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 21 22 23
+pnototal=(       32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 )
+pnototalwe=(     32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 32 )
+
+# set variables for jobmanager 
+scripts=( "RunCeres.sh" "RunMCCallisto.sh" "RunMCStar.sh" ) 
+scriptscolname=( "Ceres" "Callisto" "Star" ) 
+ratio=( 0.6 0.2 0.2 )
+maxjobs=( 10 10 10 )
+
+
+# further wishlist: adapt ratio automatically
+
Index: branches/trigger_burst_research/Setup/steps_fact_isdc.rc
===================================================================
--- branches/trigger_burst_research/Setup/steps_fact_isdc.rc	(revision 18288)
+++ branches/trigger_burst_research/Setup/steps_fact_isdc.rc	(revision 18288)
@@ -0,0 +1,62 @@
+# Definition of Steps and Dependencies for Jobs running at ISDC
+
+RawFileRsyncedISDC.MainTable: RawFileRsyncedISDC
+RawFileRsyncedISDC.Primaries: fNight, fRunID
+RawFileRsyncedISDC.Needs: 
+RawFileRsyncedISDC.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fRunID)
+RawFileRsyncedISDC.SortDirection: ASC
+
+RawFileAvailWue.MainTable: RawFileAvailWue
+RawFileAvailWue.Primaries: fNight, fRunID
+RawFileAvailWue.Needs: RawFileRsyncedISDC
+RawFileAvailWue.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fRunID)
+RawFileAvailWue.SortDirection: ASC
+
+#RawFileAvailPhido.MainTable: RawFileAvailPhido
+#RawFileAvailPhido.Primaries: fNight, fRunID
+#RawFileAvailPhido.Needs: RawFileRsyncedISDC
+#RawFileAvailPhido.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fRunID)
+#RawFileAvailPhido.SortDirection: ASC
+
+# independent as filled by CheckRawFileAvail.sh
+RawFileAvailISDC.MainTable: RawFileAvailISDC
+RawFileAvailISDC.Primaries: fNight, fRunID
+RawFileAvailISDC.Needs: RawFileRsyncedISDC
+RawFileAvailISDC.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fRunID)
+
+AuxFilesAvailISDC.MainTable: AuxFilesAvailISDC
+AuxFilesAvailISDC.Primaries: fNight
+#AuxFilesAvailISDC.Needs: 
+AuxFilesAvailISDC.SpecialJoin: LEFT JOIN RunInfo USING(fNight)
+
+DriveFileAvailISDC.MainTable: DriveFileAvailISDC
+DriveFileAvailISDC.Primaries: fNight
+#DriveFileAvailISDC.Needs: 
+DriveFileAvailISDC.SpecialJoin: LEFT JOIN RunInfo USING(fNight)
+
+RatesFileAvailISDC.MainTable: RatesFileAvailISDC
+RatesFileAvailISDC.Primaries: fNight
+#RatesFileAvailISDC.Needs: 
+RatesFileAvailISDC.SpecialJoin: LEFT JOIN RunInfo USING(fNight)
+
+SequenceFileAvailISDC.MainTable: SequenceFileAvailISDC
+SequenceFileAvailISDC.Primaries: fNight, fSequenceID
+#SequenceFileAvailISDC.Needs: 
+SequenceFileAvailISDC.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fSequenceID)
+
+Callisto.MainTable: Callisto
+Callisto.Primaries: fNight, fSequenceID
+Callisto.Needs: RawFileAvailISDC SequenceFileAvailISDC
+Callisto.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fSequenceID)
+
+Star.MainTable: Star
+Star.Primaries: fNight, fSequenceID
+Star.Needs: RawFileAvailISDC Callisto DriveFileAvailISDC RatesFileAvailISDC SequenceFileAvailISDC
+Star.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fSequenceID)
+
+#Merpp.MainTable: Merpp
+#Merpp.Primaries: fNight, fSequenceID
+#Merpp.Needs: RawFileAvailISDC Callisto Star
+#Merpp.SpecialJoin: LEFT JOIN RunInfo USING(fNight, fSequenceID)
+
+
Index: branches/trigger_burst_research/Setup/steps_fact_wue.rc
===================================================================
--- branches/trigger_burst_research/Setup/steps_fact_wue.rc	(revision 18288)
+++ branches/trigger_burst_research/Setup/steps_fact_wue.rc	(revision 18288)
@@ -0,0 +1,20 @@
+
+Corsika.MainTable: Ceres
+Corsika.Primaries: fRunNumber, fFileNumber
+
+Ceres.MainTable: Ceres
+Ceres.Primaries: fRunNumber, fCeresSetupKEY
+Ceres.Needs: Corsika 
+Ceres.SpecialJoin: LEFT JOIN CeresInfo USING(fRunNumber, fCeresSetupKEY)
+
+SequenceFile.MainTable: Ceres
+SequenceFile.Primaries: fSequenceNumber
+
+Callisto.MainTable: Ceres
+Callisto.Primaries: fSequenceNumber, fCeresSetupKEY
+Callisto.Needs: SequenceFile Ceres Corsika 
+
+Star.MainTable: Ceres
+Star.Primaries: fSequenceNumber, fCeresSetupKEY
+Star.Needs: Callisto SequenceFile Ceres Corsika 
+
Index: branches/trigger_burst_research/Sourcefile.sh
===================================================================
--- branches/trigger_burst_research/Sourcefile.sh	(revision 18288)
+++ branches/trigger_burst_research/Sourcefile.sh	(revision 18288)
@@ -0,0 +1,512 @@
+#!/bin/bash
+
+# to treat aliases in bash-script correctly
+shopt -s expand_aliases
+
+# check if script has been started with absolute path
+if ! dirname $0 | grep -E '^/' >/dev/null 2>&1
+then 
+   echo "Please start your script with an absolute path."
+   exit
+fi
+
+if [ "$AUTOMATIONSETUP" = "" ]
+then 
+   echo "Please set the environment variable \$AUTOMATIONSETUP."
+   exit
+fi
+
+if [ "$SOURCEFILEPATH" = "" ]
+then 
+   export SOURCEFILEPATH=`dirname $0`
+fi
+if [ "$SCRIPTNAME" = "" ]
+then 
+   SCRIPTNAME=`basename $0`
+fi
+
+source $SOURCEFILEPATH/../Setup/setup.$AUTOMATIONSETUP
+
+if [ "$mars" = "" ]
+then 
+   echo "Please set the path for MARS."
+   exit
+fi
+
+datetime=`date +%F-%H-%M-%S`
+
+
+# function to make sure that a directory is made
+function makedir()
+{
+   if [ ! -d $@ ]
+   then
+      if [ "$processlog" = "" ] || [ "$logfile" = "" ]
+      then
+         mkdir -p $@ 
+      else
+         mkdir -pv $@ 
+      fi
+      if [ ! -d $@ ]
+      then 
+         if ! [ "$processlog" = "" ]
+         then
+            echo `date +%F\ %T`" "`whoami`"@"$HOSTNAME" "$SCRIPTNAME"["$$"] ERROR could not make dir "$@ >> $processlog
+         else
+            echo "could not make dir "$@
+         fi
+         if ls $lockfile >/dev/null 2>&1
+         then 
+            rm -v $lockfile
+         fi
+         exit
+      fi
+   fi
+}
+
+# logging paths for runlogs and processlog
+runlogpath=$logpath/run/`date +%Y/%m/%d`
+processlogpath=$logpath/processlog
+makedir $runlogpath
+makedir $processlogpath
+processlog=$processlogpath/process`date +%F`.log
+
+makedir $lockpath
+
+
+# function to provide proper logging in a single logfile ($processlog)
+function printprocesslog
+{
+   makedir $processlogpath
+   echo `date +%F\ %T`" "`whoami`"@"$HOSTNAME" "$SCRIPTNAME"["$$"] "$@ >> $processlog
+}
+
+# function to exit a script properly
+function finish()
+{
+   if ! [ "$lockfile" = "" ] && ls $lockfile >/dev/null 2>&1
+   then 
+      printprocesslog "DEBUG " `rm -v $lockfile`
+   fi
+   printprocesslog "DEBUG finished "$SOURCEFILEPATH"/"$SCRIPTNAME
+   exit
+}
+
+
+# set checkvalue to ok at the beginning of the scripts
+check="ok"
+
+# setup for jobmanager:
+#   log files (can't be defined in script itself, as script can run longer 
+#   than one day)
+jmerrorlog=$runlogpath/jobmanager-`whoami`-$HOSTNAME-$AUTOMATIONSETUP-`date +%F`-error.log
+jmscriptlog=$runlogpath/jobmanager-`whoami`-$HOSTNAME-$AUTOMATIONSETUP-`date +%F`.log
+
+# check if rc-files are available
+if ! ls $steps >/dev/null
+then
+   echo "Can't find steps.rc ($steps)"
+   finish
+fi
+if ! ls $sqlrc >/dev/null
+then
+   echo "Can't find sql.rc ($sqlrc)"
+   finish
+fi
+
+# resetting values for jobmanager
+pno=0
+totalpno=0
+running=0
+queued=0
+runningscript=0
+queuedscript=0
+stillinqueue=0
+
+
+# alias (we cannot check the beginning of the line due to
+# color codes in filldotraw.C)
+alias 'intgrep'='grep -E -o \\\(int\\\)[0-9]+$ | grep -E -o [0-9]+'
+
+
+# in the following the functions, which are needed by several scripts, are 
+# defined
+
+# function to check if a process is already locked
+#  command line option can be used to execute something, e.g. 'continue'
+function checklock()
+{
+   if ! echo `date +%F\ %T`" "`whoami`"@"$HOSTNAME" "$SCRIPTNAME"["$$"] "`uname -a` > $lockfile 2>/dev/null
+   then 
+      if find $lockfile -amin -5
+      then
+         printprocesslog "INFO lockfile $lockfile exists"
+      else
+         printprocesslog "WARN lockfile $lockfile exists"
+      fi
+      $@
+      exit
+   else
+      printprocesslog "DEBUG created lockfile $lockfile"
+   fi
+}
+
+# print the current status values
+function printstatusvalues()
+{
+   echo "the current values are:"
+   echo " starttime=$starttime"
+   echo " stoptime=$stoptime"
+   echo " availtime=$availtime"
+   echo " returncode=$returncode"
+   echo "-- check: -$check-"
+   echo ""
+}
+
+# get the db-setup from the sql.rc
+function getdbsetup()
+{
+   db=`grep Database $sqlrc | grep -v '#' | sed -e 's/Database: //' -e 's/ //g'`
+   pw=`grep Password $sqlrc | grep -v '#' | sed -e 's/Password: //' -e 's/ //g'`
+   us=`grep User $sqlrc | grep -v '#' | sed -e 's/User: //' -e 's/ //g'`
+   ho=`grep URL $sqlrc | grep -v '#' | sed -e 's/ //g' -e 's/URL:mysql:\/\///'`
+#   echo "setup: "
+#   echo " db: "$db
+#   echo " pw: "$pw
+#   echo " us: "$us
+#   echo " ho: "$ho
+}
+
+# function to send a mysql query
+function sendquery()
+{
+   # Execute the globally assembled $query against the DB configured in
+   # sql.rc and echo the result; a literal "NULL" result is mapped to the
+   # empty string.  On query failure the whole script is terminated via
+   # 'finish'.
+   # NOTE(review): positional arguments are ignored — the statement is
+   # always taken from the global $query variable (some callers pass
+   # $query as an argument anyway; that is harmless but redundant).
+   getdbsetup
+   printprocesslog "DEBUG sendquery QUERY: "$query
+   if ! val=`mysql -s -u $us --password=$pw --host=$ho $db -e " $query "`
+   then
+      printprocesslog "ERROR could not query DB "$db" on host "$ho" with user "$us
+      #printprocesslog "ERROR could not query db (program: $program, function sendquery)"
+      #return 1 #why???
+      finish
+   fi
+   if [ "$val" = "NULL" ]
+   then
+      val=
+   fi
+   echo $val
+   return 0
+}
+
+# function to get information from the setupfile $steps
+function getfromsetup()
+{
+   # Echo the value of the key "$1.$2:" from the steps.rc file ($steps),
+   # ignoring commented lines; $1 = step name, $2 = key name.
+   grep $1"[.]"$2":" $steps | grep -v '#' | sed -e "s/$1[.]$2://"
+}
+
+# function to get the needed information from the dependencies-file steps.rc
+function getstepinfo()
+{
+   # Load DB credentials and the configuration of the current $step from
+   # steps.rc into globals used by the query builders below.
+   getdbsetup
+   # steps this step depends on (array)
+   needs=( `getfromsetup $step "Needs"` )
+   # optional node restriction for this step
+   noderestricted=`getfromsetup $step "NodeRestricted"`
+   # primary-key column names of the step's status table (array)
+   prims=( `getfromsetup $step "Primaries"` )
+   # main table joined in the status queries (blanks stripped)
+   maintable=`getfromsetup $step "MainTable" | sed -e "s/\ //g"`
+   # optional sort direction for the todo list (blanks stripped)
+   sort=`getfromsetup $step "SortDirection" | sed -e "s/\ //g"`
+#   echo " maintable: "$maintable
+#   echo " needs: "${needs[@]}
+#   echo " noderestricted: "$noderestricted
+#   echo " prims: "${prims[@]}
+}
+
+# function to get the joins needed for the get/set status queries
+function getalljoins()
+{
+   # Append to the global $query the FROM/JOIN part shared by the
+   # get-status and set-status queries: the main status table, its
+   # optional special join, the current step's table and one LEFT JOIN
+   # per entry in $othersteps.
+   # add table
+   query=$query" "$maintable"Status"
+   # add special join
+   query=$query" "`getfromsetup $maintable "SpecialJoin"`
+   # add join for step unless step is the same as maintable
+   if ! [ "$step" = "$maintable" ]
+   then 
+      query=$query" LEFT JOIN "$step"Status USING("${prims[@]}") "
+   fi
+   # add joins for influences or needs
+   for otherstep in ${othersteps[@]}
+   do
+      if ! [ "$otherstep" = "$maintable" ]
+      then 
+         query=$query" LEFT JOIN "$otherstep"Status USING("`getfromsetup $otherstep "Primaries"`") "
+      fi
+   done
+}
+
+# function to create the middle part of a query
+#  which is identical for the functions getstatus() and gettodo()
+function getstatusquery()
+{
+   # Append to the global $query the middle part (FROM..WHERE..HAVING)
+   # shared by getstatus() and gettodo(): select rows whose step has not
+   # been processed yet and whose prerequisite steps are all done.
+   # add from which table the information is queried
+   query=$query" FROM "
+   othersteps=${needs[@]}
+   getalljoins
+   # add condition
+   query=$query" WHERE "
+   # add condition for step, i.e. step is not yet done
+   query=$query" ISNULL("$step"Status.fStartTime) "
+   query=$query" AND ISNULL("$step"Status.fStopTime) "
+   query=$query" AND ISNULL("$step"Status.fAvailable) "
+   query=$query" AND ISNULL("$step"Status.fReturnCode) "
+   # add requirement for production host in case it is needed
+   # (the trailing-blank trick '"$1 " != " "' tests $1 for non-emptiness)
+   # NOTE(review): the test checks $1 but the value used below is $2,
+   # and both visible callers pass only a single argument — so this
+   # branch would build 'fProductionHostKEY= ' with an empty value.
+   # Looks like it should read $1; confirm against other callers before
+   # changing.
+   if [ "$1 " != " " ]
+   then 
+      query=$query" AND fProductionHostKEY=$2 "
+   fi
+   # GROUP BY only for SELECTs; UPDATE statements must not carry it
+   if ! echo $query | grep UPDATE >/dev/null 2>&1
+   then 
+      query=$query" GROUP BY "${prims[@]}
+   fi
+   # add condition for needs, i.e. that step is done
+   for (( k=0 ; k < ${#needs[@]} ; k++ ))
+   do
+      if [ $k -eq 0 ]
+      then 
+         query=$query" HAVING "
+      else
+         query=$query" AND "
+      fi
+      # prerequisite counts as done: started, stopped, available, no error
+      query=$query" COUNT(*)=COUNT(IF("
+      query=$query" NOT ISNULL("${needs[$k]}"Status.fStartTime) "
+      query=$query" AND NOT ISNULL("${needs[$k]}"Status.fStopTime) "
+      query=$query" AND NOT ISNULL("${needs[$k]}"Status.fAvailable) "
+      query=$query" AND ISNULL("${needs[$k]}"Status.fReturnCode) "
+      query=$query" , 1, NULL)) "
+   done
+}
+
+# function to get todolist
+#   returns the next or the list of next steps
+function gettodo()
+{
+   # Build the todo list for the current $step: fills $process with the
+   # primary-key values of the next runs to process, $primaries with them
+   # as an array and $num with the number of key tuples.
+   # $1 (optional): limit on the number of processes to return;
+   # $2 (optional): forwarded to getstatusquery.
+   # Terminates via 'finish' when there is nothing to do.
+   # reset the variable for the number of the next step 
+   process=
+   printprocesslog "DEBUG getting todo for step $step..."
+   getstepinfo
+   # get query
+   query=" SELECT "${prims[@]}
+   getstatusquery $2
+   # order by priority to the the number of the next step to be done
+   query=$query" ORDER BY "$step"Status.fPriority "
+   # default sort direction is DESC unless steps.rc specifies one
+   if [ "$sort" = "" ]
+   then 
+      query=$query" DESC "
+   else
+      query=$query" "$sort
+   fi
+   # add limitation in case only one or a limited number of 
+   #  processes should be executed
+   if [ "$1 " != " " ]
+   then 
+      query=$query" limit 0, $1 "
+   fi
+   # print query 
+   printprocesslog "DEBUG gettodo for step $step QUERY: "$query
+   # execute query 
+   process=`sendquery`
+   #if ! process=`mysql -s -u $us --password=$pw --host=$ho $db -e " $query "`
+   #then
+   #   printprocesslog "ERROR could not query processes from db (program: $program, function gettodo)"
+   #   finish
+   #fi
+   # get numbers of next step from mysql result
+   if [ "$process" = "" ]
+   then
+      printprocesslog "DEBUG => nothing to do"
+      finish
+   else
+      # $primaries holds all key values flattened; the number of runs is
+      # total values divided by the number of primary-key columns
+      primaries=( $process )
+      num=`expr ${#primaries[@]} / ${#prims[@]} `
+   fi
+}
+
+# function to get the number of processes which still have to be done
+function getstatus()
+{
+   # Count the processes of the current $step that still have to be done
+   # and store the result in $numproc.  $1 (optional) is forwarded to
+   # getstatusquery.
+   # reset the variable for the number of steps to be done
+   numproc=0
+   getstepinfo
+   # get query
+   query=" SELECT "${prims[@]}
+   getstatusquery $1
+   # print query
+   printprocesslog "DEBUG getstatus for step $step QUERY: "$query
+   # execute query
+   #numproc=`sendquery `#cannot be done with sendquery, because of row counting
+   # the row count is obtained by piping the (headerless, -s) mysql
+   # output through 'wc -l'
+   if ! numproc=`mysql -s -u $us --password=$pw --host=$ho $db -e " $query " | wc -l`
+   then
+      printprocesslog "ERROR could not query number of processes from db (program: $program, function getstatus)"
+      echo `date +%F\ %T`" ERROR could not query number of processes from db (program: $program, function getstatus)"
+      continue
+   fi
+}
+
+# function to set status of a process in the db
+function setstatus()
+{
+   # Set the status of the current process (identified by $step and the
+   # primary-key values in $primaries at index $s) in the DB.
+   # $1 is either "start" (mark the process as started, with a DB-level
+   # lock via the ISNULL(fStartTime) condition) or "stop" (record stop
+   # time, availability and return code according to $check).
+   # Sets $numchanged to the number of rows changed (ROW_COUNT()); a
+   # value of 0 on "start" means another process grabbed the run first.
+   #
+   # remark:
+   # this function does not include the 'Default' flag 
+   # for resetting steps
+
+   # for dowebplots (there are steps which have no entry in the DB)
+   if [ "$step" = "no" ]
+   then
+      return
+   fi
+   
+   # reset status values
+   starttime=NULL
+   stoptime=NULL
+   availtime=NULL
+   returncode=NULL
+   # evaluate the status values
+   # NOTE(review): 'case $@' only works as intended with a single
+   # argument; all visible callers pass exactly one ("start"/"stop").
+   case $@ in
+      start)   printprocesslog "DEBUG setstatus start"
+               starttime="Now()"
+               ;;
+       stop)   case $check in
+                  ok)  printprocesslog "DEBUG setstatus stop - ok"
+                       starttime=noreset
+                       stoptime="Now()"
+                       # fAvailable is only set when processing and
+                       # storage happen at the same site
+                       if [ "$processingsite" = "$storagesite" ]
+                       then
+                          availtime="Now()"
+                       fi
+                       ;;
+                  no)  printprocesslog "DEBUG setstatus stop - nothing new"
+                       check="ok"
+                       ;;
+                   *)  printprocesslog "DEBUG setstatus stop - failed"
+                       starttime=noreset
+                       stoptime="Now()"
+                       if [ "$processingsite" = "$storagesite" ]
+                       then
+                          availtime="Now()"
+                       fi
+                       # store the failure code; an empty $check is
+                       # recorded as return code 1
+                       if [ "$check" == "" ]
+                       then
+                          returncode=1
+                       else
+                          returncode=$check
+                       fi
+                       check="ok"
+                       ;;
+               esac
+               ;;
+          *)   printprocesslog "ERROR function setstatus got wrong variable"
+               finish
+               ;;
+   esac
+   
+   # get 
+   getstepinfo
+   
+   # get the influences from the steps.rc by evaluating the needs of all steps
+   othersteps=`grep $step $steps | grep -v '#' | grep "Needs" | grep -v "$step[.]Needs" | cut -d'.' -f1`
+   
+   # get query
+   query=" UPDATE "
+   getalljoins
+   # set the status values according to the new status of the step
+   query=$query" SET "
+   # on "stop" the start time is kept ('noreset'); otherwise it is written
+   if ! [ "$starttime" = "noreset" ]
+   then
+      query=$query" "$step"Status.fStartTime=$starttime, "
+   fi
+   query=$query" "$step"Status.fStopTime=$stoptime, "$step"Status.fAvailable=$availtime"
+   query=$query", "$step"Status.fReturnCode=$returncode , "$step"Status.fProcessingSiteKEY=$sitekey "
+   # set also the status values of the influenced steps
+   for otherstep in $othersteps
+   do
+      query=$query", "$otherstep"Status.fStartTime=NULL "
+      query=$query", "$otherstep"Status.fStopTime=NULL "
+      query=$query", "$otherstep"Status.fAvailable=NULL "
+      query=$query", "$otherstep"Status.fReturnCode=NULL "
+      query=$query", "$otherstep"Status.fProcessingSiteKEY=NULL "
+   done
+   # give the condition for which step the status values have to be set
+   query=$query" WHERE "
+   # $s indexes the current run in $primaries; default to the first one
+   if [ "$s" = "" ]
+   then
+      s=0
+   fi
+   query=$query" "$step"Status."`echo ${prims[0]} | sed -e 's/,//g'`"='${primaries[$s*${#prims[@]}]}'"
+   for (( j=1 ; j < ${#prims[@]} ; j++ ))
+   do
+      query=$query" AND "$step"Status."`echo ${prims[$j]} | sed -e 's/,//g'`"='${primaries[$s*${#prims[@]}+$j]}' "
+   done 
+   # add additional query to allow for locking in db
+   if [ "$1" = "start" ]
+   then 
+      query=$query" AND ISNULL("$step"Status.fStartTime) "
+   fi
+   # add row count to know how many rows have been changed
+   query=$query"; SELECT ROW_COUNT();"
+   # print query
+   printprocesslog "DEBUG setstatus for step $step QUERY: "$query
+   #echo "DEBUG setstatus for step $step QUERY: "$query
+   # execute query
+   numchanged=`sendquery`
+   #echo "numchanged: "$numchanged
+   #if ! numchanged=`mysql -s -u $us --password=$pw --host=$ho $db -e " $query "`
+   #then
+   #   printprocesslog "ERROR could not set status in db (program: $program, function setstatus)"
+   #   finish
+   #fi
+   if [ $numchanged -gt 0 ]
+   then
+      printprocesslog "INFO successful set of status in DB."
+      #echo "INFO successful set of status in DB."
+   else
+      # action may be taken in script using $numchanged
+      printprocesslog "ERROR status in DB was already set by another process "
+      #echo "ERROR status in DB was already set by another process "
+   fi
+}
+
+function getdates()
+{
+   # Fill the array $dates with night directories (YYYY/MM/DD) to process.
+   # $1: 'all' for every date found below $auxdata, an explicit
+   # YYYY/MM/DD date, or a count N (1-3 digits) for the last N nights.
+   # With a count, $2/$3 optionally give an hour window: if the current
+   # hour is <= $2 or >= $3, only the most recent night is returned.
+   case $1 in
+      # all dates
+      all) 
+         printprocesslog "DEBUG getdates case 'all'"
+         dates=( `find $auxdata -mindepth 3 -type d | sort -r | sed "s/\${auxdata_for_sed}//g" | sed -e 's/^\///'` ) 
+         ;;
+      # certain date
+      [0-9][0-9][0-9][0-9]/[0-9][0-9]/[0-9][0-9])
+         printprocesslog "DEBUG getdates case certain date: "$1
+         dates=( $1 )
+         ;;
+      # certain number of dates (between 0 and 99 )
+      [0-9][0-9]|[0-9]|[0-9][0-9][0-9])
+         # get last n nights
+         # a "night" is shifted by 12 hours, so the date 12h+24h*(n-1)
+         # in the past is taken for the n-th night
+         for (( numdates=1 ; numdates <= $1 ; numdates++ ))
+         do 
+            numhours=`echo " 12 + ( $numdates - 1 ) * 24 " | bc -l`
+            dates=( ${dates[@]} `date +%Y/%m/%d --date="-"$numhours"hour"` )
+         done
+         # hour-dependent number of dates
+         if [ "$2" != "" ] && [ "$3" != "" ]
+         then 
+            # get current hour
+            hour=`date +%k`
+            if [ $hour -le $2 ] || [ $hour -ge $3 ]
+            then
+               dates=( `date +%Y/%m/%d --date="-12hour"` )
+            fi
+         fi
+         ;;
+      *) # nothing valid given
+         echo "Please give valid options for the function 'getdates()'"
+         finish
+         ;;
+   esac
+}
+
Index: branches/trigger_burst_research/Transfer/BackupAuxToWue.sh
===================================================================
--- branches/trigger_burst_research/Transfer/BackupAuxToWue.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/BackupAuxToWue.sh	(revision 18288)
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+# this script has been written to run on La Palma on the machine data
+#   i.e. paths are only working on this machine
+# It rsyncs the auxiliary data of the last nights from ISDC to the
+# Wuerzburg archive (coma).
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting "$0
+
+logfile=$runlogpath"/RsyncAuxToWue-"$datetime".log"
+date >> $logfile
+
+# fill $dates with the last 6 nights (see getdates in Sourcefile.sh)
+getdates 6
+
+# do rsync for rawfiles of these dates
+for date in ${dates[@]}
+do 
+   echo "" >> $logfile 2>&1
+   # auxiliary data directories
+   # the '/./' makes rsync -R reproduce only the path part after it
+   auxdirisdc=/scratch/from_lapalma/aux/./$date/ 
+   auxdirwue=/fact/aux
+   
+   # skip nights without local aux data
+   if ! [ -d $auxdirisdc ]
+   then 
+      printprocesslog "INFO "$auxdirisdc" not available." >> $logfile 2>&1
+      continue
+   fi
+   printprocesslog "INFO processing files in "$auxdirisdc >> $logfile 2>&1
+   echo `date`": processing files in "$auxdirisdc >> $logfile 2>&1
+   
+   #rsync from ISDC to Wue
+   # -R keeps the relative path, -T uses a temporary directory for partial
+   # transfers; failures are only logged, the loop continues
+   if ! rsync -rltDvR --stats -T $rsynctempdir $auxdirisdc operator@coma.astro.uni-wuerzburg.de:$auxdirwue >> $logfile 2>&1
+   then
+      printprocesslog "CONNECTION problem rsyncing auxiliary data for "$date" from ISDC to Wue"
+      echo `date`": problem rsyncing auxiliary data for "$date" from ISDC to Wue" >> $logfile 2>&1
+      #echo `date`": problem rsyncing auxiliary data for "$date" from ISDC to Wue"
+   fi
+done
+
Index: branches/trigger_burst_research/Transfer/BackupDatabase.sh
===================================================================
--- branches/trigger_burst_research/Transfer/BackupDatabase.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/BackupDatabase.sh	(revision 18288)
@@ -0,0 +1,89 @@
+#!/bin/bash
+#
+# This script backups the databases given in setup.XXX in the array dbnames
+# For each database it dumps the schema and the full content, bzips the
+# dump, removes the backup from 30 days ago and rsyncs everything to
+# Wuerzburg.
+# 
+
+# today's date for the new files and the 30-day-old date whose files
+# are removed
+today=`date +%F`
+olday=`date +%F --date="-30day"`
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# noclobber: plain '>' must not overwrite existing files (dumps below
+# therefore use '>|' to overwrite deliberately)
+set -C
+
+logfile=$runlogpath"/BackupDatabase-"$datetime".log"
+date >> $logfile
+
+# store files on data
+path=/users/fact/DB_Backup 
+
+# getdbsetup
+
+echo "today: $today" >> $logfile 2>&1
+echo "date to remove: $olday" >> $logfile 2>&1
+
+printprocesslog "INFO doing backup for the following databases: "${dbnames[@]}
+echo "doing backup for the following databases: "${dbnames[@]}  >> $logfile 2>&1
+for dbname in ${dbnames[@]}
+do 
+   # per-database file names: schema dump ("Create_*"), full dump and
+   # the 30-day-old files to be removed
+   filepath=$path/$dbname
+   command=$filepath"/Create_"$dbname"_"$today".txt"
+   oldcommand=$filepath"/Create_"$dbname"_"$olday".txt"
+   mkdir -pv $filepath >> $logfile 2>&1
+
+   file=$filepath/$dbname
+   sqlfile=$file"_"$today".sql"
+   oldzip=$file"_"$olday".sql.bz2"
+
+   printprocesslog "INFO removing old files..."
+   echo "removing old files..." >> $logfile 2>&1
+   if ls $oldzip >/dev/null 2>&1
+   then
+      rm -v $oldzip >> $logfile 2>&1
+   fi
+   if ls $oldcommand >/dev/null 2>&1
+   then
+      rm -v $oldcommand >> $logfile 2>&1
+   fi
+
+   printprocesslog "INFO writing create commands for database '"$dbname"' to "$command
+   echo "writing create commands for database '"$dbname"' to "$command >> $logfile 2>&1
+   # commands to create db
+   # --no-data: schema only; on failure the partial file is removed
+   if ! mysqldump --host=localhost --database $dbname -u dump --no-data >| $command 2>> $logfile
+   then
+      printprocesslog "ERROR mysqldump failed for database '"$dbname"'"
+      echo "ERROR mysqldump failed for "$dbname >> $logfile 2>&1
+      rm -v $command  >> $logfile 2>&1
+   fi
+
+   printprocesslog "INFO writing database '"$dbname"' to "$sqlfile
+   echo "writing database '"$dbname"' to "$sqlfile >> $logfile 2>&1
+   # mysqldump of full DB
+   if ! mysqldump --host=localhost --database $dbname -u dump >| $sqlfile 2>> $logfile
+   then
+      printprocesslog "ERROR mysqldump failed for database '"$dbname"'"
+      echo "ERROR mysqldump failed for database '"$dbname"'" >> $logfile 2>&1
+      rm -v $sqlfile  >> $logfile 2>&1
+   else
+      # compress the successful dump with maximum compression
+      if ! bzip2 -9 $sqlfile >> $logfile 2>&1
+      then 
+         printprocesslog "ERROR zipping of "$sqlfile" failed." 
+         echo "ERROR zipping of "$sqlfile" failed."  >> $logfile 2>&1
+      fi
+   fi
+done
+
+# transfer all backups to the Wuerzburg backup directory
+dbdirwue=/home/operator/budb/fact_from_lp
+echo "" >> $logfile 2>&1
+echo `date`": rsyncing files in "$path" to Wuerzburg (coma: "$dbdirwue")" >> $logfile 2>&1
+printprocesslog "INFO rsyncing files in "$path" to Wuerzburg (coma: "$dbdirwue")"
+
+#rsync from gate to coma
+if ! /usr/bin/rsync -avxP $path operator@coma.astro.uni-wuerzburg.de:$dbdirwue >> $logfile 2>&1
+then
+   echo `date`": problem rsyncing database from LP ("$path") to Wuerzburg (coma:"$dbdirwue")" >> $logfile 2>&1
+   printprocesslog "CONNECTION problem rsyncing database from LP ("$path") to Wuerzburg (coma:"$dbdirwue")"
+fi
+
+finish >> $logfile 2>&1
+
Index: branches/trigger_burst_research/Transfer/BackupQLA.sh
===================================================================
--- branches/trigger_burst_research/Transfer/BackupQLA.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/BackupQLA.sh	(revision 18288)
@@ -0,0 +1,45 @@
+#!/bin/bash
+#
+# This script does a backup of the QLA results
+# It can be used in different location
+# 
+# Be careful: do NOT use --delete as on daq callisto files are deleted
+# 
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# noclobber, see Sourcefile.sh conventions
+set -C
+
+# get paths depending on host
+# each case sets $from (rsync source) and $to (rsync destination)
+case $HOSTNAME in
+        dc11) from="fact_opr@isdc-nx00.isdc.unige.ch:/gpfs0/fact/processing/qla/"
+              to="/scratch/fact/qla"
+              ;;
+         daq) from="/loc_data/analysis/"
+              to="/newdaq/analysis_bu"
+              # new temporary solution with rsync-servers
+              # (the second assignment overrides the one above)
+              to="newdaq::newdaq/analysis_bu"
+              ;;
+   isdc-dl00) from="fact@161.72.93.131:/daq/analysis/"
+              #to="/gpfs/scratch/fact/qla"
+              to="/gpfs0/fact/processing/qla"
+              # for new rsync: update variables
+              ;;
+           *) echo "no valid host "$HOSTNAME
+              exit
+              ;;
+esac
+
+logfile=$runlogpath"/BackupQLA-"$datetime".log"
+date >> $logfile
+
+# copy the QLA results; connection problems are logged but not fatal
+if ! rsync -av $from $to >> $logfile 2>&1
+then
+   printprocesslog "CONNECTION problem rsyncing QLA from "$from" to "$to
+   echo `date`": problem rsyncing QLA from "$from" to "$to >> $logfile 2>&1
+fi
+
+
+finish >> $logfile 2>&1
+
Index: branches/trigger_burst_research/Transfer/BackupRawToWue.sh
===================================================================
--- branches/trigger_burst_research/Transfer/BackupRawToWue.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/BackupRawToWue.sh	(revision 18288)
@@ -0,0 +1,117 @@
+#!/bin/bash
+#
+# Transfers raw files (and their DRS files) from ISDC to the Wuerzburg
+# archive, driven by the RawFileAvailWue step status in the DB.
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=RsyncRawToWue
+step=RawFileAvailWue
+
+set -C
+
+# check if only a certain date should be processed
+#  (feature for manual speedup) 
+if [ "$certaindate" != "" ]
+then
+   # validate YYYY/MM/DD format before using it
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   printprocesslog "INFO process only data of "$certaindate
+   echo "INFO process only data of "$certaindate
+   # night as YYYYMMDD for comparison with fNight values
+   specialnight=`echo $certaindate | sed -e 's/\///g'`
+fi
+
+
+# checking that not more than $numrsyncwuelimit 
+#  instances of the script are running
+numrsyncwue=`/usr/sbin/lsof $0 | grep -o -c $0`
+if [ $numrsyncwue -gt $numrsyncwuelimit ]
+then
+   printprocesslog "INFO already "$numrsyncwue" BackupRawToWue.sh running -> exit. "
+   finish
+fi
+
+logfile=$runlogpath"/RsyncRawToWue-"$datetime".log"
+date >> $logfile
+
+# get todo list
+# fills $primaries (night/runid pairs) and $num (number of runs)
+gettodo 
+
+
+for (( s=0 ; s < $num ; s++ ))
+do
+   # primaries are flattened as [night, runid, night, runid, ...]
+   night=${primaries[$s+$s]}
+   runid=${primaries[$s+$s+1]}
+   # night as directory path YYYY/MM/DD and file stem YYYYMMDD_RRR
+   nightpath=`echo $night | cut -c 1-4`"/"`echo $night | cut -c 5-6`"/"`echo $night | cut -c 7-8`
+   nightrun=$night"_"`printf %03d $runid`
+   
+   # honour the optional $certaindate restriction
+   if [ "$specialnight" != "" ]
+   then
+      if [ $night -ne $specialnight ]
+      then
+         printprocesslog "INFO Treat only "$specialnight" -> continue for "$night
+         echo "INFO Treat only "$specialnight" -> continue for "$night
+         continue
+      fi
+   fi
+   
+   # locate the raw file on disk (compressed as .fits.fz or .fits.gz)
+   rawdirisdc="/scratch/from_lapalma/raw"
+   isdcfile=`find $rawdirisdc -type f -regex ".*$nightrun[.]fits[.][fg]z"`
+   if [ "$isdcfile" == "" ]
+   then
+      echo "ERROR Could not find raw file for "$nightrun" on isdc-dl00 - Something is really wrong."
+      printprocesslog "ERROR Could not find raw file for "$nightrun" on isdc-dl00 - Something is really wrong."
+      continue
+   else
+      filename=`basename $isdcfile`
+   fi
+
+   # '/./' marks the relative part that rsync -R recreates at the target
+   rawfileisdc=$rawdirisdc"/./"$nightpath"/"$filename
+   rawdirwue="/fact/raw/"
+
+   # mark the run as started in the DB; numchanged==0 means another
+   # instance already claimed it
+   setstatus "start" 
+   if [ $numchanged -eq 0 ]
+   then
+      printprocesslog "INFO "$numchanged" rows were changed in DB => "$night"_"$runid" is already processed => continue."
+      continue
+   fi
+
+   printprocesslog "INFO rsync rawfile "$rawfileisdc" to Wue "$rawdirwue
+   #rsync -rltDvR --partial --stats --password-file=/home_nfs/isdc/fact_opr/rsync.pwd factdata@161.72.93.131::$rawfilelp $rawdirisdc >> $logfile 2>&1
+   #echo "rsync -avxR --no-p --stats -T $rsynctempdir $rawfileisdc operator@coma.astro.uni-wuerzburg.de:$rawdirwue >> $logfile 2>&1"
+   rsync -avxR --no-p --stats -T $rsynctempdir $rawfileisdc operator@coma.astro.uni-wuerzburg.de:$rawdirwue >> $logfile 2>&1
+   check1=$?
+
+   # a non-zero rsync exit code is stored in $check so setstatus "stop"
+   # records it as the return code
+   case $check1 in
+      0)   printprocesslog "INFO rawfile "$rawfileisdc" transferred successfully to Wue."
+           ;;
+      *)   printprocesslog "CONNECTION "$rawfileisdc" could not be transferred to Wue."
+           check=$check1
+           ;;
+   esac
+   
+   # transfer the accompanying DRS file when the DB says one exists
+   query="SELECT fHasDrsFile from RunInfo WHERE fNight="$night" AND fRunID="$runid
+   numdrs=`sendquery`
+   if [ $numdrs -gt 0 ]
+   then 
+      drsfileisdc=$rawdirisdc"/./"$nightpath"/"$nightrun".drs.fits.gz"
+      printprocesslog "INFO rsync rawfile "$drsfileisdc" to Wue "$rawdirwue
+      rsync -avxR --no-p --stats $drsfileisdc operator@coma.astro.uni-wuerzburg.de:$rawdirwue >> $logfile 2>&1
+      check1=$?
+
+      case $check1 in
+         0)   printprocesslog "INFO rawfile "$drsfileisdc" transferred successfully to Wue."
+              ;;
+         *)   printprocesslog "CONNECTION "$drsfileisdc" could not be transferred to Wue."
+              check=$check1
+              ;;
+      esac
+   fi
+
+   # record the final state (ok or failed with $check) in the DB
+   setstatus "stop" 
+done
+
+finish 
+
Index: branches/trigger_burst_research/Transfer/CheckTransfer.sh
===================================================================
--- branches/trigger_burst_research/Transfer/CheckTransfer.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/CheckTransfer.sh	(revision 18288)
@@ -0,0 +1,926 @@
+#!/bin/bash
+#
+# This script checks whether data can be deleted
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+
+# make sure only one instance of this check is running
+numchecktransfer=`/usr/sbin/lsof $0 | grep -o -c $0`
+if [ $numchecktransfer -gt 1 ]
+then
+   printprocesslog "INFO "$0" already running -> exit. "
+   echo "INFO "$0" already running -> exit. "
+   finish
+fi
+
+logfile2=$logpath"/transfer/CheckTransfer.log"
+date > $logfile2 2>&1
+
+# df -P output fields: filesystem, size, used, available, ...; index 3
+# is the available space in KB
+diskusage=( `ssh fact@161.72.93.131 "df -P /daq" | grep daq ` )
+# check if more than X GB are left on /daq
+if [ ${diskusage[3]} -lt 700000 ] 
+then
+   printprocesslog "DISK less than 700 GB left on /daq ("${diskusage[3]}")"
+   echo "WARN less than 700 GB left on /daq ("${diskusage[3]}")"
+   echo "WARN less than 700 GB left on /daq ("${diskusage[3]}")" >> $logfile2 2>&1
+   sendemail="yes"
+fi
+
+diskusage2=( `df -P /scratch | grep scratch ` )
+# check if more than X GB are left on /scratch
+if [ ${diskusage2[3]} -lt 500000 ] 
+then
+   printprocesslog "DISK less than 500 GB left on /scratch ("${diskusage2[3]}")"
+   # fixed: the messages were missing the opening '(' around the value
+   # (cf. the /daq messages above)
+   echo "WARN less than 500 GB left on /scratch ("${diskusage2[3]}")"
+   echo "WARN less than 500 GB left on /scratch ("${diskusage2[3]}")" >> $logfile2 2>&1
+   sendemail="yes"
+fi
+
+# needed for transfer to phido
+#source /home_nfs/isdc/fact_opr/myagent.sh
+
+# check first the disk in LP and on dl00
+ssh fact@161.72.93.131 "df -h /*da*"
+df -h /scratch
+echo ""
+echo "" >> $logfile2 2>&1
+# check next the DB to know if some transfer processes failed or crashed
+function check_runs_in_db()
+{
+   # Query the status table $1 with the global $toquery/$where and, if
+   # any failed or crashed runs are found, print them (highlighted on
+   # the terminal, plain in $logfile2) together with ready-made SELECT
+   # and UPDATE statements for manual inspection/reset, and set
+   # sendemail="yes".
+   # NOTE(review): sendquery ignores its argument and uses the global
+   # $query, so passing $query here is redundant but harmless.
+   query="SELECT "$toquery" FROM "$1" "$where
+   #echo $query
+   runs=( `sendquery $query` )
+   if [ ${#runs[@]} -gt 0 ]
+   then
+      #for run in ${runs[@]}
+      #do
+      #   echo $run
+      #done
+      sendemail="yes"
+      # terminal output with blinking red markers around the run list
+      echo -e "\e[1;31m\x1b[5m ==>\e[00m "$1": "${runs[@]}"\e[1;31m\x1b[5m <==\e[00m "
+      echo "SELECT fNight, fRunId, fStartTime, fStopTime, fReturnCode FROM "$1" "$where";"
+      echo "UPDATE "$1" SET fStartTime=NULL, fStopTime=NULL, fAvailable=NULL, fReturnCode=NULL, fProcessingSiteKey=NULL "$where";"
+      echo -e " ==> "$1": "${runs[@]}" <== " >> $logfile2 2>&1
+      echo "---> Please check the DB and reset the processes if needed. " >> $logfile2 2>&1
+      echo "to check: SELECT fNight, fRunId, fStartTime, fStopTime, fReturnCode FROM "$1" "$where";" >> $logfile2 2>&1
+      echo "to reset: UPDATE "$1" SET fStartTime=NULL, fStopTime=NULL, fAvailable=NULL, fReturnCode=NULL, fProcessingSiteKey=NULL "$where";" >> $logfile2 2>&1
+   fi
+}
+# get information of runs where transfer had a problem
+# NOTE(review): only the last of the four $toquery assignments is
+# effective; the earlier ones look like kept-around alternatives.
+toquery="fNight, fRunID, fStartTime, fStopTime, fAvailable, fProcessingSiteKey, fReturnCode "
+toquery="CONCAT(fNight, '_', fRunID, '(', fStartTime, '-', fStopTime, ':', fReturnCode, ')') "
+toquery="CONCAT(fNight, '_', fRunID, ':', fReturnCode) "
+toquery="IF (ISNULL(fReturnCode), CONCAT(fNight, '_', fRunID, 'crashed'), CONCAT(fNight, '_', fRunID, 'failed', fReturnCode)) "
+# failed (fReturnCode set) or crashed (started >5h ago, never stopped)
+where="WHERE NOT ISNULL(fReturnCode) OR (NOT ISNULL(fStartTime) AND ISNULL(fStopTime)) AND fStartTime < DATE_ADD(Now(), INTERVAL -5 HOUR) "
+check_runs_in_db "RawFileRsyncedISDCStatus"
+check_runs_in_db "RawFileAvailWueStatus"
+# RawFileAvailISDC needs a different treatment 
+#  as return code 0 means that file is in fails folder in archive
+where="WHERE fReturnCode>0 OR (NOT ISNULL(fStartTime) AND ISNULL(fStopTime)) AND fStartTime < DATE_ADD(Now(), INTERVAL -1 HOUR) "
+check_runs_in_db "RawFileAvailISDCStatus"
+
+# get nights from directory in LP
+dates=( `ssh fact@161.72.93.131 "find /loc_data/zipraw -mindepth 3 -type d | sort | sed -e 's/\/loc_data\/zipraw\///g' "` )
+
+# counters/limits for the per-night check loop below
+numdaysok=0
+numdaysoklimit=10
+sumdata=0
+checklimit=5000
+for date in ${dates[@]}
+do 
+   logfile=$logpath"/transfer/CheckTransfer_"`echo $date | sed -e 's/\//-/g'`".log"
+
+   echo ""
+   echo "" >> $logfile2 2>&1
+   echo "Processing "$date" ..." 
+   echo "Processing "$date" ..." >> $logfile2 2>&1
+
+   # avoid that already checked days are checked again
+   if grep "EVERYTHING" $logfile >/dev/null 2>&1
+   then
+      echo " "$date" has been checked already and is fine. Please check logfile "$logfile
+      echo " "$date" has been checked already and is fine. Please check logfile "$logfile >> $logfile2 2>&1
+      sendemail="yes"
+      continue
+   fi
+   if [ "$certaindate" != "" ]
+   then
+      checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+      if [ "$checkstring" = "" ]
+      then
+         echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+         finish
+      fi
+      if [ "$certaindate" != "$date" ]
+      then
+         printprocesslog "INFO continue, as certaindate has been set to "$certaindate
+         echo " continue, as certaindate has been set to "$certaindate
+         continue
+      fi
+   fi
+
+   # some counters
+   numdiff=0
+   numok=0
+   numpb=0
+   
+   # check always only $numdaysoklimit days
+   # and require at least $checklimit GB that have been checked 
+   #  remark: bc: expr1 < expr2: the result is 1 if expr1 is strictly less than expr2
+   if [ $numdaysok -ge $numdaysoklimit ] && [ $(echo " $sumdata > $checklimit " | bc -l) -eq 1 ]
+   then
+      printprocesslog "INFO more than "$numdaysoklimit" ok and more than "$checklimit" GB checked. "
+      continue
+   fi
+   
+   # get paths
+   date2=`echo $date | sed -e 's/\///g'`
+#   lprawpath="/daq/raw/"$date
+   lprawpath="/newdaq/raw/"$date
+#   lprawpath2="/loc_data/raw/"$date
+   lprawpath2="/daq/raw/"$date
+   lpziprawpath="/loc_data/zipraw/"$date
+   localrawpath="/scratch/from_lapalma/raw/"$date
+   localrawpath3="/fact/raw/"$date
+#   localfailpath="/archive/fact/fails/raw/"$date
+   localfailpath="/gpfs/fact/fact-archive/fails/raw/"$date
+   wuerawpath="/fact/raw/"$date
+   qlapath="/daq/analysis/callisto/"$date
+   #phidorawpath="/fhgfs/groups/app/fact-construction/raw/"$date
+   
+   # get disk usage and number of files for directory
+   #   on newdaq files with more than 3 digits as run number are excluded
+   newdaq=( `ssh fact@161.72.93.131 "if [ -d $lprawpath ]; then ls $lprawpath/${date2}_[0-9][0-9][0-9][.]* | wc -l; du -s -b --apparent-size $lprawpath; else echo '-1 -1 -1'; fi"` )
+   daq=( `ssh fact@161.72.93.131 "if [ -d $lprawpath2 ]; then ls $lprawpath2/* | wc -l; du -s -b --apparent-size $lprawpath2; else echo '-1 -1 -1'; fi"` )
+   if [ ${newdaq[0]} -eq -1 ] && [ ${daq[0]} -eq -1 ] 
+   then
+      printprocesslog "INFO no data available on newdaq for "$date
+      echo "INFO no data available on newdaq for "$date >> $logfile 2>&1
+      continue
+   fi
+   zip=( `ssh fact@161.72.93.131 "if [ -d $lpziprawpath ]; then ls $lpziprawpath/* 2>/dev/null | wc -l; du -s -b --apparent-size $lpziprawpath; else echo '-1 -1 -1'; fi"` )
+   dl00=( `if [ -d $localrawpath ]; then ls $localrawpath/* | wc -l; du -s -b --apparent-size $localrawpath; else echo '-1 -1 -1'; fi` )
+   archive=( `if [ -d $localrawpath3 ]; then ls $localrawpath3/* 2>/dev/null | wc -l; du -L -s -b --apparent-size $localrawpath3; else echo '-1 -1 -1'; fi` )
+   fails=( `if [ -d $localfailpath ]; then ls $localfailpath/* | wc -l; du -L -s -b --apparent-size $localfailpath; else echo '-1 -1 -1'; fi` )
+   wue=( `ssh operator@coma.astro.uni-wuerzburg.de "if [ -d $wuerawpath ]; then ls $wuerawpath/* | wc -l; du -s -b --apparent-size $wuerawpath; else echo '-1 -1 -1'; fi"` )
+   #phido=( `ssh -i /home_nfs/isdc/fact_opr/.ssh/id_rsa.fact_opr.phido 129.217.160.201 "if [ -d $phidorawpath ]; then ls $phidorawpath/* | wc -l; du -s -b --apparent-size $phidorawpath; else echo '-1 -1 -1'; fi"` )
+   qla=( `ssh fact@161.72.93.131 "ls $qlapath/20*_C.root 2>/dev/null | wc -l"` )
+   query="SELECT Sum(if(fHasDrsFile=1,2,1)) FROM RunInfo WHERE fNight="$date2
+   querystart="SELECT Sum(if(fHasDrsFile=1,2,1)) FROM "
+   queryjoin="LEFT JOIN RunInfo USING(fNight,fRunID) "
+   querywhere="WHERE fNight="$date2" AND NOT ISNULL(fStartTime) AND NOT ISNULL(fStopTime) AND ISNULL(fReturnCode)"
+   numruns=`sendquery`
+   if [ "$numruns" == "" ]
+   then
+      numruns=0
+   fi
+   query=$querystart"RawFileRsyncedISDCStatus "$queryjoin" "$querywhere
+   numrsynced=`sendquery`
+   if [ "$numrsynced" == "" ]
+   then
+      numrsynced=0
+   fi
+   query=$querystart"RawFileAvailISDCStatus "$queryjoin" "$querywhere
+   numisdc=`sendquery`
+   if [ "$numisdc" == "" ]
+   then
+      numisdc=0
+   fi
+   query=$querystart"RawFileAvailWueStatus "$queryjoin" "$querywhere
+   numwue=`sendquery`
+   if [ "$numwue" == "" ]
+   then
+      numwue=0
+   fi
+   #query=$querystart"RawFileAvailPhidoStatus "$queryjoin" "$querywhere
+   #numphido=`sendquery`
+   #if [ "$numphido" == "" ]
+   #then
+   #   numphido=0
+   #fi
+   
+   # select number of data runs
+   query="SELECT COUNT(*) FROM RunInfo WHERE fNight="$date2
+   query=$query" AND fRunTypeKey=1"
+   numdatruns=`sendquery`
+   
+   printprocesslog "disk: "
+   printprocesslog "  newdaq  "${newdaq[@]}
+   printprocesslog "  daq     "${daq[@]}
+   printprocesslog "  data    "${zip[@]}
+   printprocesslog "  dl00    "${dl00[@]}
+   printprocesslog "  wue     "${wue[@]}
+   printprocesslog "  arch    "${archive[@]}
+   printprocesslog "  fail    "${fails[@]}
+   #printprocesslog "  phido   "${phido[@]}
+   printprocesslog "  qla     "${qla[@]}
+   printprocesslog "db: "
+   printprocesslog "  runinfo "$numruns
+   printprocesslog "  rsynced "$numrsynced
+   printprocesslog "  isdc    "$numisdc
+   printprocesslog "  wue     "$numwue
+   #printprocesslog "  phido   "$numphido
+   printprocesslog "  data    "$numdatruns
+   echo "disk: " >> $logfile 2>&1
+   echo "  newdaq  "${newdaq[@]} >> $logfile 2>&1
+   echo "  daq     "${daq[@]} >> $logfile 2>&1
+   echo "  data    "${zip[@]} >> $logfile 2>&1
+   echo "  dl00    "${dl00[@]} >> $logfile 2>&1
+   echo "  wue     "${wue[@]} >> $logfile 2>&1
+   echo "  arch    "${archive[@]} >> $logfile 2>&1
+   echo "  fail    "${fails[@]} >> $logfile 2>&1
+   #echo "  phido   "${phido[@]} >> $logfile 2>&1
+   echo "  qla     "${qla[@]} >> $logfile 2>&1
+   echo "db: " >> $logfile 2>&1
+   echo "  runinfo "$numruns >> $logfile 2>&1
+   echo "  rsynced "$numrsynced >> $logfile 2>&1
+   echo "  isdc    "$numisdc >> $logfile 2>&1
+   echo "  wue     "$numwue >> $logfile 2>&1
+   #echo "  phido   "$numphido >> $logfile 2>&1
+   echo "  datruns "$numdatruns >> $logfile 2>&1
+   
+   if ! [ $numdatruns -eq $qla ]
+   then
+      printprocesslog "WARN not all data runs are processed yet by the QLA for "$date"."
+      echo "WARN not all data runs are processed yet by the QLA for "$date >> $logfile 2>&1
+      result1="1-"
+   else
+      result1="0-"
+   fi
+   # check if file are available in the different places
+   if [ ${dl00[0]} -eq -1 ] && [ $date2 -lt 20120308 ] 
+   then
+      printprocesslog "INFO data not available on /scratch on dl00 for "$date
+      echo "INFO data not available on /scratch on dl00 for "$date >> $logfile 2>&1
+   fi
+   if [ ${archive[0]} -eq -1 ] 
+   then
+      printprocesslog "INFO data not in archive for "$date
+      echo "INFO data not in archive for "$date >> $logfile 2>&1
+   fi
+   
+   # check if number of files agree in the different places
+   # lp
+   if ! [ ${daq[0]} -eq -1 ]  && ! [ ${daq[0]} -eq ${newdaq[0]} ]
+   then
+      printprocesslog "WARN number of files on daq (" ${daq[0]}") does not agree with number of files on newdaq (" ${newdaq[0]}") for "$date
+      echo "WARN number of files on daq (" ${daq[0]}") does not agree with number of files on newdaq (" ${newdaq[0]}") for "$date >> $logfile 2>&1
+      numpb=`echo " $numpb + 1 " | bc -l `
+      result1=$result1"1"
+   else
+      numok=`echo " $numok + 1 " | bc -l `
+      result1=$result1"0"
+   fi
+   # dl00
+   #if ! [ ${dl00[0]} -eq -1 ]  && ! [ ${dl00[0]} -eq ${newdaq[0]} ]
+   if ! [ ${dl00[0]} -eq ${newdaq[0]} ]
+   then
+      printprocesslog "WARN number of files on dl00 (" ${dl00[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date
+      echo "WARN number of files on dl00 (" ${dl00[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date >> $logfile 2>&1
+      numpb=`echo " $numpb + 1 " | bc -l `
+      result1=$result1"1"
+   else
+      numok=`echo " $numok + 1 " | bc -l `
+      result1=$result1"0"
+   fi
+   # archive
+   if ! [ ${archive[0]} -eq -1 ]  && ! [ ${archive[0]} -eq ${newdaq[0]} ]
+   then
+      printprocesslog "WARN number of files in archive (" ${archive[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date
+      echo "WARN number of files in archive (" ${archive[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date >> $logfile 2>&1
+      #check /archive/rev_1/failed
+      if ! [ ${fails[0]} -eq -1 ]
+      then
+         sum=`echo " ${fails[0]} + ${archive[0]} " | bc -l `
+         if ! [ $sum -eq ${newdaq[0]} ]
+         then
+            printprocesslog "ERROR number of files in whole archive ("$sum") is different from number of files in La Palma ("${newdaq[0]}")."
+            echo "ERROR number of files in whole archive ("$sum") is different from number of files in La Palma ("${newdaq[0]}")." >> $logfile 2>&1
+            numpb=`echo " $numpb + 1 " | bc -l `
+            result1=$result1"1"
+         else
+            numok=`echo " $numok + 1 " | bc -l `
+            result1=$result1"0"
+         fi
+      else
+         result1=$result1"1"
+      fi
+   else
+      if [ ${archive[0]} -eq -1 ]
+      then 
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result1=$result1"1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result1=$result1"0"
+      fi
+   fi
+   # wue
+   #if ! [ ${wue[0]} -eq -1 ]  && ! [ ${wue[0]} -eq ${newdaq[0]} ]
+   if ! [ ${wue[0]} -eq ${newdaq[0]} ]
+   then
+      printprocesslog "WARN number of files in Wue (" ${wue[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date
+      echo "WARN number of files in Wue (" ${wue[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date >> $logfile 2>&1
+      numpb=`echo " $numpb + 1 " | bc -l `
+      result1=$result1"1"
+   else
+      numok=`echo " $numok + 1 " | bc -l `
+      result1=$result1"0"
+   fi
+   ## phido
+   #if ! [ ${phido[0]} -eq -1 ]  && ! [ ${phido[0]} -eq ${newdaq[0]} ]
+   #then
+   #   printprocesslog "WARN number of files on Phido (" ${phido[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date
+   #   echo "WARN number of files on Phido (" ${phido[0]}") does not agree with number of files in LP (" ${newdaq[0]}") for "$date >> $logfile 2>&1
+   #   numpb=`echo " $numpb + 1 " | bc -l `
+   #   result1=$result1"1"
+   #else
+   #   numok=`echo " $numok + 1 " | bc -l `
+   #   result1=$result1"0"
+   #fi
+   
+   short2=
+   if [ "$result1" != "0-0000" ] && [ "$short" != "no" ]
+   then
+      short2="yes"
+   fi
+   
+   if [ "$short2" = "yes" ] 
+   then
+      printprocesslog "number of files does not yet agree in all sites ("$result1") -> do no further checking."
+      echo "" >> $logfile 2>&1
+      echo "number of files does not yet agree in all sites ("$result1") -> do no further checking." >> $logfile 2>&1
+      echo "" >> $logfile 2>&1
+      # print to console
+      echo "SUMMARY for "$date
+      echo "-----------------------"
+      echo "  number of files does not yet agree in all sites: "${newdaq[0]}" (newdaq) "${daq[0]}" (daq) "${zip[0]}" (data) "${dl00[0]}" (dl) "${wue[0]}" (wue) "${archive[0]}" (arch) "${fails[0]}" (fails) "${qla[@]}" (qla) "$numdatruns" (datruns) "
+      echo "  "$date" is not yet transferred completely. Please check the logfile "$logfile
+      # print to single logfile
+      echo "SUMMARY for "$date >> $logfile 2>&1
+      echo "-----------------------" >> $logfile 2>&1
+      echo "  number of files does not yet agree in all sites: "${newdaq[0]}" (newdaq) "${daq[0]}" (daq) "${zip[0]}" (data) "${dl00[0]}" (dl) "${wue[0]}" (wue) "${archive[0]}" (arch) "${fails[0]}" (fails) "${qla[@]}" (qla) "$numdatruns" (datruns) " >> $logfile 2>&1
+      echo "  "$date" is not yet transferred completely. Please check the logfile "$logfile >> $logfile 2>&1
+      # print to global logfile
+      echo "SUMMARY for "$date >> $logfile2 2>&1
+      echo "-----------------------" >> $logfile2 2>&1
+      echo "  number of files does not yet agree in all sites: "${newdaq[0]}" (newdaq) "${daq[0]}" (daq) "${zip[0]}" (data) "${dl00[0]}" (dl) "${wue[0]}" (wue) "${archive[0]}" (arch) "${fails[0]}" (fails) "${qla[@]}" (qla) "$numdatruns" (datruns) " >> $logfile2 2>&1
+      echo "  "$date" is not yet transferred completely. Please check the logfile "$logfile >> $logfile2 2>&1
+      continue
+   fi
+   
+   # check du for raw files 
+   # la palma
+   if ! [ ${newdaq[1]} -eq ${daq[1]} ]
+   then
+      printprocesslog "WARN size of data doesn't agree on newdaq ("${newdaq[1]}") and daq ("${daq[1]}") for "$date
+      echo "WARN size of data doesn't agree on newdaq ("${newdaq[1]}") and daq ("${daq[1]}") for "$date >> $logfile 2>&1
+      numdiff=`echo " $numdiff + 1 " | bc -l `
+      numpb=`echo " $numpb + 1 " | bc -l `
+      result2="1"
+   else
+      numok=`echo " $numok + 1 " | bc -l `
+      result2="0"
+   fi
+   # check du for zipped raw files 
+   # dl00
+   if ! [ ${zip[1]} -eq ${dl00[1]} ] && [ $date2 -gt 20120307 ]
+   then
+      printprocesslog "WARN size of data doesn't agree on data ("${zip[1]}") and dl00 ("${dl00[1]}") for "$date
+      echo "WARN size of data doesn't agree on data ("${zip[1]}") and dl00 ("${dl00[1]}") for "$date >> $logfile 2>&1
+      if ! [ ${dl00[1]} -eq -1 ]
+      then 
+         numdiff=`echo " $numdiff + 1 " | bc -l `
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result2=$result2"1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result2=$result2"0"
+      fi
+   else
+      numok=`echo " $numok + 1 " | bc -l `
+      result2=$result2"0"
+   fi
+   # archive
+   if ! [ ${zip[1]} -eq ${archive[1]} ]
+   then
+      printprocesslog "WARN size of data doesn't agree on data ("${zip[1]}") and in archive ("${archive[1]}") for "$date
+      echo "WARN size of data doesn't agree on data ("${zip[1]}") and in archive ("${archive[1]}") for "$date >> $logfile 2>&1
+      if ! [ ${archive[1]} -eq -1 ]
+      then 
+         numdiff=`echo " $numdiff + 1 " | bc -l `
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result2=$result2"1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result2=$result2"0"
+      fi
+   else
+      numok=`echo " $numok + 1 " | bc -l `
+      result2=$result2"0"
+   fi
+   # wue
+   if ! [ ${zip[1]} -eq ${wue[1]} ]
+   then
+      printprocesslog "WARN size of data doesn't agree on data ("${zip[1]}") and in Wue ("${wue[1]}") for "$date
+      echo "WARN size of data doesn't agree on data ("${zip[1]}") and in Wue ("${wue[1]}") for "$date >> $logfile 2>&1
+      if ! [ ${wue[1]} -eq -1 ]
+      then 
+         numdiff=`echo " $numdiff + 1 " | bc -l `
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result2=$result2"1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result2=$result2"0"
+      fi
+   else
+      numok=`echo " $numok + 1 " | bc -l `
+      result2=$result2"0"
+   fi
+   ## phido
+   #if ! [ ${zip[1]} -eq ${phido[1]} ]
+   #then
+   #   printprocesslog "WARN size of data doesn't agree on data ("${zip[1]}") and on Phido ("${phido[1]}") for "$date
+   #   echo "WARN size of data doesn't agree on data ("${zip[1]}") and on Phido ("${phido[1]}") for "$date >> $logfile 2>&1
+   #   if ! [ ${phido[1]} -eq -1 ]
+   #   then 
+   #      numdiff=`echo " $numdiff + 1 " | bc -l `
+   #      numpb=`echo " $numpb + 1 " | bc -l `
+   #      result2=$result2"1"
+   #   else
+   #      numok=`echo " $numok + 1 " | bc -l `
+   #      result2=$result2"0"
+   #   fi
+   #else
+   #   numok=`echo " $numok + 1 " | bc -l `
+   #   result2=$result2"0"
+   #fi
+   #result=$result"-"
+
+   # check DB (only starting from 8.3.2012) (if-clause to be removed later)
+   if [ $date2 -gt 20120307 ]
+   then
+      # lp
+      if ! [ $numruns -eq ${newdaq[0]} ]
+      then
+         printprocesslog "WARN number of runs on newdaq ("${newdaq[0]}") not equal to number of runs ("$numruns")"
+         echo "WARN number of runs on newdaq ("${newdaq[0]}") not equal to number of runs ("$numruns")" >> $logfile 2>&1
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result3="1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result3="0"
+      fi
+      # dl00
+      if ! [ $numruns -eq $numrsynced ]
+      then
+         printprocesslog "WARN number of rsynced runs ("$numrsynced") not equal to number of runs ("$numruns")"
+         echo "WARN number of rsynced runs ("$numrsynced") not equal to number of runs ("$numruns")" >> $logfile 2>&1
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result3=$result3"1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result3=$result3"0"
+      fi
+      # archive
+      if ! [ $numruns -eq $numisdc ]
+      then
+         printprocesslog "WARN number of ingested files in archive ("$numisdc") not equal to number of runs ("$numruns")"
+         echo "WARN number of ingested files in archive ("$numisdc") not equal to number of runs ("$numruns")" >> $logfile 2>&1
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result3=$result3"1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result3=$result3"0"
+      fi
+      # wue
+      if ! [ $numruns -eq $numwue ]
+      then
+         printprocesslog "WARN number of backed up runs in Wue ("$numwue") not equal to number of runs ("$numruns")"
+         echo "WARN number of backed up runs in Wue ("$numwue") not equal to number of runs ("$numruns")" >> $logfile 2>&1
+         numpb=`echo " $numpb + 1 " | bc -l `
+         result3=$result3"1"
+      else
+         numok=`echo " $numok + 1 " | bc -l `
+         result3=$result3"0"
+      fi
+      ## phido
+      #if ! [ $numruns -eq $numphido ]
+      #then
+      #   printprocesslog "WARN number of backuped on Phido ("$numrsynced") not equal to number of runs ("$numruns")"
+      #   echo "WARN number of backuped on Phido ("$numrsynced") not equal to number of runs ("$numruns")" >> $logfile 2>&1
+      #   numpb=`echo " $numpb + 1 " | bc -l `
+      #   result3=$result3"1"
+      #else
+      #   numok=`echo " $numok + 1 " | bc -l `
+      #   result3=$result3"0"
+      #fi
+   fi
+   
+   #numdiff=0 # add for debugging so that single file sizes are not checked
+   printprocesslog "numdiff: "$numdiff
+   printprocesslog "INFO numok: "$numok
+   printprocesslog "INFO numpb: "$numpb
+   echo "numdiff: "$numdiff >> $logfile 2>&1
+   echo "INFO numok: "$numok >> $logfile 2>&1
+   echo "INFO numpb: "$numpb >> $logfile 2>&1
+   #if [ $numdiff -gt 0 ]
+   if [ $numdiff -ge 0 ]
+   then
+      query="SELECT fRunID FROM RunInfo WHERE fNight="$date2
+      runs=( `sendquery` )
+      archivediffcounter=0
+      archiveokcounter=0
+      wuediffcounter=0
+      wueokcounter=0
+      #phidodiffcounter=0
+      #phidookcounter=0
+      dl00diffcounter=0
+      dl00okcounter=0
+      daqdiffcounter=0
+      daqokcounter=0
+      printprocesslog "INFO found "${#runs[@]}" rawfiles in DB."
+      echo "INFO found "${#runs[@]}" rawfiles in DB." >> $logfile 2>&1
+      for run in ${runs[@]}
+      do 
+         rawfile=$date2"_"`printf %03d $run`".fits"
+         #rawfile2=$rawfile".gz"
+         rawfile2=$rawfile".*z"
+         printprocesslog "INFO checking "$rawfile
+         
+         # get file sizes for run
+         #sizenewdaq=( `ssh fact@161.72.93.131 "ls -l ${lprawpath}/${rawfile} 2>/dev/null | awk '{ print \\\$5 }'"` )
+         sizenewdaq=( `ssh fact@161.72.93.131 "ls -lH ${lprawpath}/${rawfile} 2>/dev/null | awk '{ print \\\$5 }'"` )
+         sizedaq=( `ssh fact@161.72.93.131 "ls -l $lprawpath2/$rawfile 2>/dev/null | awk '{ print \\\$5 }'"` )
+         sizezip=( `ssh fact@161.72.93.131 "ls -l $lpziprawpath/$rawfile2 2>/dev/null | awk '{ print \\\$5 }'"` )
+         if ! [ ${dl00[1]} -eq -1 ]
+         then
+            sizedl00=( `ls -l $localrawpath/$rawfile2 2>/dev/null | awk '{ print \$5 }'` )
+         fi
+         if ! [ ${archive[1]} -eq -1 ]
+         then
+            sizearchive=( `ls -lH $localrawpath3/$rawfile2 2>/dev/null | awk '{ print \$5 }'` )
+         fi
+         if ! [ ${fails[1]} -eq -1 ]
+         then
+            sizefails=( `ls -lH $localfailpath/$rawfile2 2>/dev/null | awk '{ print \$5 }'` )
+         fi
+         if ! [ ${wue[1]} -eq -1 ]
+         then
+            sizewue=( `ssh operator@coma.astro.uni-wuerzburg.de "ls -l $wuerawpath/$rawfile2 2>/dev/null | awk '{ print \\\$5 }'"` )
+         fi
+         #if ! [ ${phido[1]} -eq -1 ]
+         #then
+         #   sizephido=( `ssh -i /home_nfs/isdc/fact_opr/.ssh/id_rsa.fact_opr.phido 129.217.160.201 "ls -l $phidorawpath/$rawfile2 2>/dev/null | awk '{ print \\\$5 }'"` )
+         #fi
+         
+         # check file sizes for run
+         # lp
+         if ! [ "$sizenewdaq" = "$sizedaq" ]
+         then
+            printprocesslog "WARN "$rawfile" newdaq("$sizenewdaq") daq("$sizedaq")"
+            echo "  "$rawfile" newdaq("$sizenewdaq") daq("$sizedaq")" >> $logfile 2>&1
+            daqdiffcounter=`echo " $daqdiffcounter + 1 " | bc -l `
+         else
+            daqokcounter=`echo " $daqokcounter + 1 " | bc -l `
+         fi
+         # dl00
+         if ! [ "$sizezip" = "$sizedl00" ] && ! [ ${dl00[1]} -eq -1 ]
+         then
+            printprocesslog "WARN "$rawfile2" data("$sizezip") dl00("$sizedl00")"
+            echo "  "$rawfile2" data("$sizezip") dl00("$sizedl00")" >> $logfile 2>&1
+            dl00diffcounter=`echo " $dl00diffcounter + 1 " | bc -l `
+         else
+            dl00okcounter=`echo " $dl00okcounter + 1 " | bc -l `
+         fi
+         # archive
+         #if [  "$sizezip" != "$sizearchive"  -a  ${archive[1]} -ne -1 -a "$sizearchive" != "" ] || [  "$sizezip" != "$sizefails" -a ${fails[1]} -ne -1 -a "$sizefails" != "" ] #not yet ingested files are treated wrongly
+         #if [ ${archive[1]} -ne -1 ] && [ "$sizezip" != "$sizearchive" -o "$sizezip" != "$sizefails" ] 
+         if [ ${archive[1]} -ne -1 -a "$sizezip" != "$sizearchive" -a "$sizezip" != "$sizefails" ] 
+         then
+            printprocesslog "WARN "$rawfile2" data("$sizezip") archive("$sizearchive"/"$sizefails")"
+            echo "  "$rawfile2" data("$sizezip") archive("$sizearchive"/"$sizefails")" >> $logfile 2>&1
+            #echo " "$sizezip"-"$sizearchive"-"${archive[1]}"-"$sizezip"-"$sizefails"-"${fails[1]}
+            archivediffcounter=`echo " $archivediffcounter + 1 " | bc -l `
+         else
+            archiveokcounter=`echo " $archiveokcounter + 1 " | bc -l `
+         fi
+         # wue
+         if ! [ "$sizezip" = "$sizewue" ] && ! [ ${wue[1]} -eq -1 ]
+         then
+            printprocesslog "WARN "$rawfile2" data("$sizezip") wue("$sizewue")"
+            echo "  "$rawfile2" data("$sizezip") wue("$sizewue")" >> $logfile 2>&1
+            wuediffcounter=`echo " $wuediffcounter + 1 " | bc -l `
+         else
+            wueokcounter=`echo " $wueokcounter + 1 " | bc -l `
+         fi
+         ## phido
+         #if ! [ "$sizezip" = "$sizephido" ] && ! [ ${phido[1]} -eq -1 ]
+         #then
+         #   printprocesslog "  "$rawfile2" data("$sizezip") phido("$sizephido")"
+         #   echo "  "$rawfile2" data("$sizezip") phido("$sizephido")" >> $logfile 2>&1
+         #   phidodiffcounter=`echo " $phidodiffcounter + 1 " | bc -l `
+         #else
+         #   phidookcounter=`echo " $phidookcounter + 1 " | bc -l `
+         #fi
+      done
+      query="SELECT fRunID FROM RunInfo WHERE fNight="$date2" AND fHasDrsFile=1"
+      drsruns=( `sendquery` )
+      printprocesslog "INFO found "${#drsruns[@]}" drsfiles in DB."
+      echo "INFO found "${#drsruns[@]}" drsfiles in DB." >> $logfile 2>&1
+      for drsrun in ${drsruns[@]}
+      do 
+         rawfile=$date2"_"`printf %03d $drsrun`".drs.fits"
+         #rawfile2=$rawfile".gz"
+         rawfile2=$rawfile".*z"
+         # get file sizes for run
+         #sizenewdaq=( `ssh fact@161.72.93.131 "ls -l ${lprawpath}/${rawfile} 2>/dev/null | awk '{ print \\\$5 }'"` )
+         sizenewdaq=( `ssh fact@161.72.93.131 "ls -lH ${lprawpath}/${rawfile} 2>/dev/null | awk '{ print \\\$5 }'"` )
+         sizedaq=( `ssh fact@161.72.93.131 "ls -l $lprawpath2/$rawfile 2>/dev/null | awk '{ print \\\$5 }'"` )
+         sizezip=( `ssh fact@161.72.93.131 "ls -l $lpziprawpath/$rawfile2 2>/dev/null | awk '{ print \\\$5 }'"` )
+         if ! [ ${dl00[1]} -eq -1 ]
+         then
+            sizedl00=( `ls -l $localrawpath/$rawfile2 2>/dev/null | awk '{ print \$5 }'` )
+         fi
+         if ! [ ${archive[1]} -eq -1 ]
+         then
+            sizearchive=( `ls -lH $localrawpath3/$rawfile2 2>/dev/null | awk '{ print \$5 }'` )
+         fi
+         if ! [ ${fails[1]} -eq -1 ]
+         then
+            sizefails=( `ls -lH $localfailpath/$rawfile2 2>/dev/null | awk '{ print \$5 }'` )
+         fi
+         if ! [ ${wue[1]} -eq -1 ]
+         then
+            sizewue=( `ssh operator@coma.astro.uni-wuerzburg.de "ls -l $wuerawpath/$rawfile2 2>/dev/null | awk '{ print \\\$5 }'"` )
+         fi
+         #if ! [ ${phido[1]} -eq -1 ]
+         #then
+         #   sizephido=( `ssh -i /home_nfs/isdc/fact_opr/.ssh/id_rsa.fact_opr.phido 129.217.160.201 "ls -l $phidorawpath/$rawfile2 2>/dev/null | awk '{ print \\\$5 }'"` )
+         #fi
+         
+         # check file sizes for run
+         # lp
+         if ! [ "$sizenewdaq" = "$sizedaq" ]
+         then
+            printprocesslog "WARN "$rawfile" newdaq("$sizenewdaq") daq("$sizedaq")"
+            echo "  "$rawfile" newdaq("$sizenewdaq") daq("$sizedaq")" >> $logfile 2>&1
+            daqdiffcounter=`echo " $daqdiffcounter + 1 " | bc -l `
+         else
+            daqokcounter=`echo " $daqokcounter + 1 " | bc -l `
+         fi
+         # dl00
+         if ! [ "$sizezip" = "$sizedl00" ] && ! [ ${dl00[1]} -eq -1 ]
+         then
+            printprocesslog "WARN "$rawfile2" data("$sizezip") dl00("$sizedl00")"
+            echo "  "$rawfile2" data("$sizezip") dl00("$sizedl00")" >> $logfile 2>&1
+            dl00diffcounter=`echo " $dl00diffcounter + 1 " | bc -l `
+         else
+            dl00okcounter=`echo " $dl00okcounter + 1 " | bc -l `
+         fi
+         #if [  "$sizezip" != "$sizearchive"  -a  ${archive[1]} -ne -1 ] || [  "$sizezip" != "$sizefails" -a ${fails[1]} -ne -1  ]
+         if [  "$sizezip" != "$sizearchive"  -a  ${archive[1]} -ne -1 -a "$sizearchive" != "" ] || [  "$sizezip" != "$sizefails" -a ${fails[1]} -ne -1 -a "$sizefails" != "" ]
+         then
+            printprocesslog "WARN "$rawfile2" data("$sizezip") archive("$sizearchive"/"$sizefails")"
+            echo "  "$rawfile2" data("$sizezip") archive("$sizearchive"/"$sizefails")" >> $logfile 2>&1
+            archivediffcounter=`echo " $archivediffcounter + 1 " | bc -l `
+         else
+            archiveokcounter=`echo " $archiveokcounter + 1 " | bc -l `
+         fi
+         # wue
+         if ! [ "$sizezip" = "$sizewue" ] && ! [ ${wue[1]} -eq -1 ]
+         then
+            printprocesslog "WARN "$rawfile2" data("$sizezip") wue("$sizewue")"
+            echo "  "$rawfile2" data("$sizezip") wue("$sizewue")" >> $logfile 2>&1
+            wuediffcounter=`echo " $wuediffcounter + 1 " | bc -l `
+         else
+            wueokcounter=`echo " $wueokcounter + 1 " | bc -l `
+         fi
+         ## phido
+         #if ! [ "$sizezip" = "$sizephido" ] && ! [ ${phido[1]} -eq -1 ]
+         #then
+         #   printprocesslog "WARN "$rawfile2" data("$sizezip") phido("$sizephido")"
+         #   echo "  "$rawfile2" data("$sizezip") phido("$sizephido")" >> $logfile 2>&1
+         #   phidodiffcounter=`echo " $phidodiffcounter + 1 " | bc -l `
+         #else
+         #   phidookcounter=`echo " $phidookcounter + 1 " | bc -l `
+         #fi
+      done
+      
+      #result=$result"-"
+      # raw files
+      if [ $daqokcounter -eq ${daq[0]} ]
+      then
+         numok=`echo " $numok + 1 " | bc -l `
+         result4="0"
+      else
+         result4="1"
+         numpb=`echo " $numpb + 1 " | bc -l `
+      fi
+      # zipped files
+      # dl00
+      if [ $dl00okcounter -eq ${dl00[0]} ]
+      then
+         result4=$result4"0"
+         numok=`echo " $numok + 1 " | bc -l `
+      else
+         result4=$result4"1"
+         numpb=`echo " $numpb + 1 " | bc -l `
+      fi
+      # archive
+      # daq had been used, because archive[0] doesn't include fails[0]
+      #if [ $archiveokcounter -eq ${newdaq[0]} ]
+      # archive[0] had been used, because newdaq[0] might be empty 
+      #   in case the data was taken on data
+      #if [ $archiveokcounter -eq ${archive[0]} ]
+      # compare with daq[0] as there should be always data on data
+      if [ $archiveokcounter -eq ${daq[0]} ]
+      then
+         result4=$result4"0"
+         numok=`echo " $numok + 1 " | bc -l `
+      else
+         result4=$result4"1"
+         numpb=`echo " $numpb + 1 " | bc -l `
+      fi
+      # wue
+      if [ $wueokcounter -eq ${wue[0]} ]
+      then
+         result4=$result4"0"
+         numok=`echo " $numok + 1 " | bc -l `
+      else
+         result4=$result4"1"
+         numpb=`echo " $numpb + 1 " | bc -l `
+      fi
+      ## phido
+      #if [ $phidookcounter -eq ${phido[0]} ]
+      #then
+      #   result4=$result4"0"
+      #   numok=`echo " $numok + 1 " | bc -l `
+      #else
+      #   result4=$result4"1"
+      #   numpb=`echo " $numpb + 1 " | bc -l `
+      #fi
+      printprocesslog "INFO "$daqokcounter" files are ok on daq (raw)."
+      printprocesslog "INFO "$dl00okcounter" files are ok on dl00."
+      printprocesslog "INFO "$wueokcounter" files are ok in Wue."
+      printprocesslog "INFO "$archiveokcounter" files are ok in the archive."
+      #printprocesslog "INFO "$phidookcounter" files are ok on Phido."
+      echo "INFO "$daqokcounter" files are ok on daq (raw)." >> $logfile 2>&1
+      echo "INFO "$dl00okcounter" files are ok on dl00." >> $logfile 2>&1
+      echo "INFO "$wueokcounter" files are ok in Wue." >> $logfile 2>&1
+      echo "INFO "$archiveokcounter" files are ok in the archive." >> $logfile 2>&1
+      #echo "INFO "$phidookcounter" files are ok on Phido." >> $logfile 2>&1
+      if [ $daqdiffcounter -gt 0 ]
+      then 
+         printprocesslog "WARN "$daqdiffcounter" files have a different size on daq (raw)."
+         echo "WARN "$daqdiffcounter" files have a different size on daq (raw)." >> $logfile 2>&1
+      else
+         printprocesslog "INFO "$daqdiffcounter" files have a different size on daq (raw)."
+         echo "INFO "$daqdiffcounter" files have a different size on daq (raw)." >> $logfile 2>&1
+      fi
+      if [ $dl00diffcounter -gt 0 ]
+      then 
+         printprocesslog "WARN "$dl00diffcounter" files have a different size on dl00."
+         echo "WARN "$dl00diffcounter" files have a different size on dl00." >> $logfile 2>&1
+      else
+         printprocesslog "INFO "$dl00diffcounter" files have a different size on dl00."
+         echo "INFO "$dl00diffcounter" files have a different size on dl00." >> $logfile 2>&1
+      fi
+      if [ $wuediffcounter -gt 0 ]
+      then 
+         printprocesslog "WARN "$wuediffcounter" files have a different size in Wue."
+         echo "WARN "$wuediffcounter" files have a different size in Wue." >> $logfile 2>&1
+      else
+         printprocesslog "INFO "$wuediffcounter" files have a different size in Wue."
+         echo "INFO "$wuediffcounter" files have a different size in Wue." >> $logfile 2>&1
+      fi
+      if [ $archivediffcounter -gt 0 ]
+      then 
+         printprocesslog "WARN "$archivediffcounter" files have a different size in the archive."
+         echo "WARN "$archivediffcounter" files have a different size in the archive." >> $logfile 2>&1
+      else
+         printprocesslog "INFO "$archivediffcounter" files have a different size in the archive."
+         echo "INFO "$archivediffcounter" files have a different size in the archive." >> $logfile 2>&1
+      fi
+      #printprocesslog "WARN "$phidodiffcounter" files have a different size on Phido."
+      #echo "WARN "$phidodiffcounter" files have a different size on Phido." >> $logfile 2>&1
+   fi
+   
+   # print summary: 
+   printprocesslog "INFO day ok: "$numdaysok
+   printprocesslog "INFO numok: "$numok
+   printprocesslog "INFO numpb: "$numpb
+   printprocesslog "result:"
+   printprocesslog "(qla-#files-dudir-db-filesize)"
+   #printprocesslog " ldawp-ldawp-ldawp-ldawp"
+   printprocesslog " q-ldaw-ldaw-ldaw-ldaw"
+   printprocesslog " "$result1"-"$result2"-"$result3"-"$result4
+   echo "INFO day ok: "$numdaysok >> $logfile 2>&1
+   echo "INFO numok: "$numok >> $logfile 2>&1
+   echo "INFO numpb: "$numpb >> $logfile 2>&1
+   echo "result:" >> $logfile 2>&1
+   echo "(qla-#files-dudir-db-filesize)" >> $logfile 2>&1
+   #echo " ldawp-ldawp-ldawp-ldawp" >> $logfile 2>&1
+   echo " q-ldaw-ldaw-ldaw-ldaw" >> $logfile 2>&1
+   echo " "$result1"-"$result2"-"$result3"-"$result4 >> $logfile 2>&1
+   sumdatanew=`echo " ( ${daq[1]} + ${zip[1]} ) / 1024 / 1024 / 1024 " | bc -l | cut -d. -f1`
+   sumdata=`echo " $sumdata + $sumdatanew " | bc -l | cut -d. -f1`
+   printprocesslog "checked already "$sumdata" GB. "$sumdatanew" "${daq[1]}" "${zip[1]}
+   if [ $numpb -lt 4 ]
+   then 
+      numdaysok=`echo " $numdaysok + 1 " | bc -l `
+   fi
+
+   echo "SUMMARY for "$date
+   echo "-----------------------"
+   echo "SUMMARY for "$date >> $logfile 2>&1
+   echo "-----------------------" >> $logfile 2>&1
+   echo "SUMMARY for "$date >> $logfile2 2>&1
+   echo "-----------------------" >> $logfile2 2>&1
+   #echo "res1:"$result1
+   #echo "res3:"$result3
+   #echo "res4:"$result4
+   #echo "arch:"${archive[0]}
+   #echo "isdc:"$numisdc
+   if [ "$result1" = "0-0000" ] && [ "$result3" = "0000" ] && [ "$result4" = "0000" ]
+   then 
+      echo "  EVERYTHING is ok. "$date" can be deleted. "
+      echo "" >> $logfile 2>&1
+      echo "  EVERYTHING is ok. "$date" can be deleted. " >> $logfile 2>&1
+      echo "  EVERYTHING is ok. "$date" can be deleted. " >> $logfile2 2>&1
+      echo "    Details in the logfile "$logfile
+      echo "" >> $logfile 2>&1
+      echo "    Details in the logfile "$logfile >> $logfile2 2>&1
+      sendemail="yes"
+   else
+      if [ "$result1" = "0-0000" ] && [ "$result3" = "0010" ] && [ "$result4" = "0000" ] && [ ${archive[0]} -eq $numisdc ]
+      then 
+         echo "  "${fails[0]}" file(s) corrupt (fails folder), but files are transferred correctly. "
+         echo "  "${fails[0]}" file(s) corrupt (fails folder), but files are transferred correctly. " >> $logfile 2>&1
+         echo "  "${fails[0]}" file(s) corrupt (fails folder), but files are transferred correctly. " >> $logfile2 2>&1
+         echo "  TRANSFER is ok. "$date" can be deleted. "
+         echo "" >> $logfile 2>&1
+         echo "  TRANSFER is ok. "$date" can be deleted. " >> $logfile 2>&1
+         echo "  TRANSFER is ok. "$date" can be deleted. " >> $logfile2 2>&1
+         echo "    Details in the logfile "$logfile
+         echo "" >> $logfile 2>&1
+         echo "    Details in the logfile "$logfile >> $logfile2 2>&1
+         sendemail="yes"
+      else
+         echo "  "$date" is not yet transferred completely. Please check the logfile "$logfile
+         echo "" >> $logfile 2>&1
+         echo "  "$date" is not yet transferred completely. Please check the logfile "$logfile >> $logfile 2>&1
+         echo "  "$date" is not yet transferred completely. Please check the logfile "$logfile >> $logfile2 2>&1
+         echo "    resetting jobs in the DB might be needed."
+         echo "    resetting jobs in the DB might be needed." >> $logfile 2>&1
+         echo "    resetting jobs in the DB might be needed." >> $logfile2 2>&1
+      fi
+   fi
+   if [ $daqdiffcounter -gt 0 ]
+   then 
+      echo "  WARN "$daqdiffcounter" files have a different size on daq (raw)."
+      echo "  WARN "$daqdiffcounter" files have a different size on daq (raw)." >> $logfile 2>&1
+      echo "  WARN "$daqdiffcounter" files have a different size on daq (raw)." >> $logfile2 2>&1
+   fi
+   if [ $dl00diffcounter -gt 0 ]
+   then 
+      echo "  WARN "$dl00diffcounter" files have a different size on dl00."
+      echo "  WARN "$dl00diffcounter" files have a different size on dl00." >> $logfile 2>&1
+      echo "  WARN "$dl00diffcounter" files have a different size on dl00." >> $logfile2 2>&1
+   fi
+   if [ $wuediffcounter -gt 0 ]
+   then 
+      echo "  WARN "$wuediffcounter" files have a different size in Wue."
+      echo "  WARN "$wuediffcounter" files have a different size in Wue." >> $logfile 2>&1
+      echo "  WARN "$wuediffcounter" files have a different size in Wue." >> $logfile2 2>&1
+   fi
+   if [ $archivediffcounter -gt 0 ]
+   then 
+      echo "  WARN "$archivediffcounter" files have a different size in the archive."
+      echo "  WARN "$archivediffcounter" files have a different size in the archive." >> $logfile 2>&1
+      echo "  WARN "$archivediffcounter" files have a different size in the archive." >> $logfile2 2>&1
+   fi
+   if ! [ $numdatruns -eq $qla ]
+   then
+      echo "WARN not all data runs are processed yet by the QLA. " 
+      echo "WARN not all data runs are processed yet by the QLA. " >> $logfile 2>&1
+      echo "WARN not all data runs are processed yet by the QLA. " >> $logfile2 2>&1
+   fi
+   echo ""
+   echo "" >> $logfile 2>&1
+   echo "" >> $logfile2 2>&1
+done
+
+#sendemail="yes"
+if [ "$sendemail" = "yes" ]
+then
+   echo ""
+   echo "INFO send email with "$logfile2" to shift@fact-project.org "
+   printprocesslog "INFO send email with "$logfile2" to shift@fact-project.org "
+   cat $logfile2 | mail -s "testmail for info on deleting data" shift@fact-project.org
+fi 
+
+printprocesslog "INFO finished $0"
+
Index: branches/trigger_burst_research/Transfer/RsyncAuxLP.sh
===================================================================
--- branches/trigger_burst_research/Transfer/RsyncAuxLP.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/RsyncAuxLP.sh	(revision 18288)
@@ -0,0 +1,95 @@
+#!/bin/bash
+
+# this script has been written to run on La Palma on the machine daq
+#   i.e. paths are only working on this machine
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting "$0
+
+logfile=$runlogpath"/RsyncAuxLP-"$datetime".log"
+date >> $logfile
+
+# check if /data is mounted on daq (comment fixed: the code greps for "data" and the errors refer to /data on daq)
+if ! mount | grep data >> $logfile 2>&1
+then 
+   printprocesslog "ERROR /data is not mounted on daq => please mount it"
+   echo `date`": /data is not mounted on daq => please mount it"
+   finish
+fi
+
+# check if paths are available
+if ! ls /data/aux >/dev/null 2>&1
+then 
+   printprocesslog "ERROR /data/aux is not available."
+   finish
+fi
+if ! ls /loc_data/aux >/dev/null 2>&1
+then 
+   printprocesslog "ERROR /loc_data/aux is not available."
+   finish
+fi
+
+# do the rsync for the last 6 days
+dirs=( `ssh newdaq "find /loc_data/aux/ -mindepth 3 -type d | sort | tail -6"` )
+
+# do rsync for auxfiles in these directories
+for dir in ${dirs[@]}
+do 
+   echo "" >> $logfile 2>&1
+   # directory on daq
+   if ! [ -d $dir ]
+   then 
+      mkdir -pv $dir >> $logfile 2>&1
+   fi
+   # directory on data
+   dirdata=`echo $dir | sed -e 's/loc_//'`
+   if ! [ -d $dirdata ]
+   then 
+      mkdir -pv $dirdata >> $logfile 2>&1
+   fi
+   printprocesslog "INFO processing files in "$dir >> $logfile 2>&1
+   echo `date`": processing files in "$dir >> $logfile 2>&1
+   
+   # get current hour
+   hour=`date +%k`
+   # define bwlimit for rsync depending on the time: from 19-7h reduced bwlimit for rsync
+   if [ $hour -le 6 ] || [ $hour -ge 19 ]
+   then
+      # limit bw for rsync to 5 MB/s during night (comment matched to effective value)
+      #bwlimit="--bwlimit=20000"  # former 20 MB/s limit kept for reference; it was a dead assignment, immediately overwritten below
+      bwlimit="--bwlimit=5000"
+      printprocesslog "INFO rsync files with "$bwlimit >> $logfile 2>&1
+      echo "rsync files with "$bwlimit >> $logfile 2>&1
+   else 
+      # no bw limit during day
+      bwlimit=""
+      printprocesslog "INFO rsync files without bwlimit" >> $logfile 2>&1
+      echo "rsync files without bwlimit" >> $logfile 2>&1
+   fi
+
+   # rsync from newdaq to daq
+   rsyncserverdir=`echo $dir | sed -e 's/^\//172.16.100.100::/' -e 's/loc_data/newdaq/'` 
+   # old
+   #if ! /usr/bin/rsync -avxHP -T $rsynctempdir $bwlimit newdaq:$dir/ $dir >> $logfile 2>&1
+   # new (temporary until pb on daq is solved)
+   if ! /usr/bin/rsync -avxHP -T $rsynctempdir $bwlimit $rsyncserverdir/ $dir >> $logfile 2>&1
+   then
+      printprocesslog "CONNECTION problem rsyncing auxiliary files in "$dir" from newdaq to daq"
+      echo `date`": problem rsyncing auxiliary files in "$dir" from newdaq to daq" >> $logfile 2>&1
+   fi
+
+   # rsynctemp-dir -> use one on data!!! i.e. has to be /data/rsync_tmp
+   # rsync from daq to data
+   # old (rsynctempdirdata not tested)
+   #rsynctempdirdata=`echo $rsynctempdir | sed -e 's/loc_//'` 
+   #if ! /usr/bin/rsync -avxHP -T $rsynctempdirdata $bwlimit $dir/ $dirdata >> $logfile 2>&1
+   # new
+   rsyncserverdirdata=`echo $dir | sed -e 's/^\//data::/' -e 's/loc_//'` 
+   rsynctempdirdata=/rsync_tmp
+   if ! /usr/bin/rsync -avxHP -T $rsynctempdirdata $bwlimit $dir/ $rsyncserverdirdata >> $logfile 2>&1
+   then
+      printprocesslog "CONNECTION problem rsyncing auxiliary files in "$dir" from daq to data"
+      echo `date`": problem rsyncing auxiliary files in "$dir" from daq to data" >> $logfile 2>&1
+   fi
+done
+
Index: branches/trigger_burst_research/Transfer/RsyncAuxToISDC.sh
===================================================================
--- branches/trigger_burst_research/Transfer/RsyncAuxToISDC.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/RsyncAuxToISDC.sh	(revision 18288)
@@ -0,0 +1,71 @@
+#!/bin/bash
+
+# this script has been written to run on La Palma on the machine data
+#   i.e. paths are only working on this machine
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting "$0
+
+logfile=$runlogpath"/RsyncAuxToISDC-"$datetime".log"
+date >> $logfile
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 9 nights
+   getdates 9
+fi
+
+
+
+# do rsync for rawfiles of these dates
+for date in ${dates[@]}
+do 
+   echo "" >> $logfile 2>&1
+   echo $date >> $logfile 2>&1
+   # auxiliary data directories
+   auxdirlp=/loc_data/aux/./$date/ 
+   auxdirisdc=/scratch/from_lapalma/aux
+   
+   printprocesslog "INFO processing files in "$auxdirlp >> $logfile 2>&1
+   echo `date`": processing files in "$auxdirlp >> $logfile 2>&1
+   
+   # get current hour
+   hour=`date +%k`
+   # decide whether to run this rsync pass with checksum comparison (see explanation below)
+   if [ $hour -ge 13 ] && [ $hour -le 14 ]
+   then
+      # use once a day the -c option
+      #   normally files are compared by size and time
+      #   but for the daily files the size doesn't change when TSTOP is added
+      #   due to the one hour time difference it could happen that rsync
+      #     doesn't update the file when only checking time and size
+      # run with this option after lunch (LP time)
+      # NOTE(review): original condition was "-le 14 || -ge 13", which is always true; && matches the stated intent
+      option="-c"
+      printprocesslog "INFO rsync data with option "$option >> $logfile 2>&1
+      echo "rsync data with option "$option >> $logfile 2>&1
+   else 
+      option="" # reset explicitly — otherwise -c would leak into later loop iterations
+      printprocesslog "INFO rsync data without extra option" >> $logfile 2>&1
+      echo "rsync data without extra option" >> $logfile 2>&1
+   fi
+   
+   if ! ssh fact@161.72.93.131 "ls $auxdirlp >/dev/null 2>&1"
+   then
+      printprocesslog "INFO "$auxdirlp" not available on data. "
+      continue
+   fi
+   
+   #rsync from daq to data
+   #if ! /usr/bin/rsync -avxHPu $bwlimit $auxdirdaq $auxdirdata >> $logfile 2>&1 # in case of adding checksum, only update files with rsync
+   if ! rsync -rltDvR $option -T $rsynctempdir --stats fact@161.72.93.131:$auxdirlp $auxdirisdc >> $logfile 2>&1
+   then
+      printprocesslog "CONNECTION problem rsyncing auxiliary data for "$date" from La Palma to ISDC"
+      echo `date`": problem rsyncing auxiliary data for "$date" from La Palma to ISDC" >> $logfile 2>&1
+   fi
+done
+
Index: branches/trigger_burst_research/Transfer/RsyncDBToWue.sh
===================================================================
--- branches/trigger_burst_research/Transfer/RsyncDBToWue.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/RsyncDBToWue.sh	(revision 18288)
@@ -0,0 +1,20 @@
+#!/bin/bash
+# Standalone script: rsync the DB backup from La Palma (gate) to Wuerzburg (coma). Does NOT source Sourcefile.sh.
+today=`date +%F`
+logfile=/home/`whoami`/DataCheck/log/RsyncDBToWue$today.log
+
+dbdirlp=/home/fact/DB_Backup
+dbdirwue=/home/operator/budb/fact_from_lp
+echo "" >> $logfile 2>&1
+echo "" >> $logfile 2>&1
+echo "" >> $logfile 2>&1
+echo `date`": processing files in "$dbdirlp >> $logfile 2>&1
+
+#rsync from gate to coma
+if ! /usr/bin/rsync -avxP $dbdirlp operator@coma.astro.uni-wuerzburg.de:$dbdirwue >> $logfile 2>&1
+then
+   printprocesslog "CONNECTION problem rsyncing DB from LP to Wue" # NOTE(review): printprocesslog is defined in Sourcefile.sh, which this script never sources — presumably fails with "command not found"; confirm
+   echo `date`": problem rsyncing database from LP to Wuerzburg" >> $logfile 2>&1
+fi
+
+
Index: branches/trigger_burst_research/Transfer/RsyncRawLP.sh
===================================================================
--- branches/trigger_burst_research/Transfer/RsyncRawLP.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/RsyncRawLP.sh	(revision 18288)
@@ -0,0 +1,98 @@
+#!/bin/bash
+
+# this script has been written to run on La Palma on the machine data
+#   i.e. paths are only working on this machine
+
+echo `dirname $0`" is not to be used anymore."
+exit
+# NOTE: script is deprecated — everything below this unconditional exit is dead code
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting "$0
+
+logfile=$runlogpath"/RsyncRawLP-"$datetime".log"
+date >> $logfile
+
+# check if /daq is mounted on data
+if ! mount | grep daq >> $logfile 2>&1
+then 
+   printprocesslog "ERROR /daq is not mounted on data => please mount it"
+   echo `date`": /daq is not mounted on data => please mount it"
+   finish
+fi
+
+# check if paths are available
+if ! ls /daq/raw >/dev/null 2>&1
+then 
+   printprocesslog "ERROR /daq/raw is not available."
+   finish
+fi
+if ! ls /loc_data/raw >/dev/null 2>&1
+then 
+   printprocesslog "ERROR /loc_data/raw is not available."
+   finish
+fi
+
+# get last 3, 6 or 9 nights
+#dates=( `date +%Y/%m/%d --date="-12hour"` `date +%Y/%m/%d --date="-36hour"` `date +%Y/%m/%d --date="-60hour"` \
+#        `date +%Y/%m/%d --date="-84hour"` `date +%Y/%m/%d --date="-108hour"` `date +%Y/%m/%d --date="-132hour"` \
+#        `date +%Y/%m/%d --date="-156hour"` `date +%Y/%m/%d --date="-180hour"` `date +%Y/%m/%d --date="-204hour"` \
+#        )
+
+dates=( 
+        `date +%Y/%m/%d --date="-204hour"`  `date +%Y/%m/%d --date="-180hour"` `date +%Y/%m/%d --date="-156hour"` \
+        `date +%Y/%m/%d --date="-132hour"`  `date +%Y/%m/%d --date="-108hour"` `date +%Y/%m/%d --date="-84hour"`  \
+        `date +%Y/%m/%d --date="-60hour"`   `date +%Y/%m/%d --date="-36hour"`  `date +%Y/%m/%d --date="-12hour"`  \
+        )
+
+# do rsync for rawfiles of these dates
+for date in ${dates[@]}
+do 
+   echo "" >> $logfile 2>&1
+   # raw data directories on data
+   rawdirdaq=/daq/raw/$date/ # /loc_data from daq via nfs on data
+   rawdirdata=/loc_data/raw/$date
+   if ! [ -d $rawdirdaq ]
+   then 
+      echo `date`": "$rawdirdaq" not found. No data available on daq." >> $logfile 2>&1
+      if [ -d $rawdirdata ]
+      then
+         printprocesslog "WARN "$rawdirdaq" not found, but "$rawdirdata". Data probably taken on data." >> $logfile 2>&1
+      else
+         printprocesslog "INFO "$rawdirdaq" not found. No data available on daq." >> $logfile 2>&1
+      fi
+      continue
+   fi
+   if ! [ -d $rawdirdata ]
+   then 
+      mkdir -pv $rawdirdata >> $logfile 2>&1
+   fi
+   printprocesslog "INFO processing files in "$rawdirdaq >> $logfile 2>&1
+   echo `date`": processing files in "$rawdirdaq >> $logfile 2>&1
+
+   # get current hour
+   hour=`date +%k`
+   # define bwlimit for rsync depending on the time: from 19-7h reduced bwlimit for rsync
+   if [ $hour -le 6 ] || [ $hour -ge 19 ]
+   then
+      # limit bw for rsync to 20 MB/s during night
+      bwlimit="--bwlimit=20000"
+      printprocesslog "INFO rsync data with "$bwlimit >> $logfile 2>&1
+      echo "rsync data with "$bwlimit >> $logfile 2>&1
+   else
+      # no bw limit during day
+      bwlimit=""
+      printprocesslog "INFO rsync data without bwlimit" >> $logfile 2>&1
+      echo "rsync data without bwlimit" >> $logfile 2>&1
+   fi
+
+   #rsync from daq to data
+   #if ! /usr/bin/rsync -avxHPu $bwlimit $rawdirdaq $rawdirdata >> $logfile 2>&1 # in case of adding checksum, only update files with rsync
+   if ! /usr/bin/rsync -avxHP -T $rsynctempdir $bwlimit $rawdirdaq $rawdirdata >> $logfile 2>&1
+   then
+      printprocesslog "WARN problem rsyncing rawdata for "$date" from daq to data"
+      echo `date`": problem rsyncing rawdata for "$date" from daq to data"
+   fi
+done
+
+finish
+
Index: branches/trigger_burst_research/Transfer/RsyncRawToISDC.sh
===================================================================
--- branches/trigger_burst_research/Transfer/RsyncRawToISDC.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/RsyncRawToISDC.sh	(revision 18288)
@@ -0,0 +1,161 @@
+#!/bin/bash
+#
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting $0"
+program=RsyncRawToISDC
+step=RawFileRsyncedISDC
+pwfile=/home_nfs/isdc/fact_opr/rsync.pwd
+
+set -C
+
+logfile=$runlogpath"/RsyncRawToISDC-"$datetime".log"
+date >> $logfile 2>&1
+
+## check if script is already running
+#lockfile=$lockpath/lock-$program.txt
+#checklock 
+
+# stop transfer when less than 100 GB are left
+function check_scratch()
+{
+   diskusage2=( `df -P /scratch | grep scratch ` )
+   # check if more than X GB are left on /scratch
+   if [ ${diskusage2[3]} -lt 100000 ] 
+   then
+      printprocesslog "DISK less than 100 GB left on /scratch ("${diskusage2[3]}")"
+      #echo "WARN less than 100 GB left on /scratch ("${diskusage2[3]}")"
+      echo "WARN less than 100 GB left on /scratch ("${diskusage2[3]}")" >> $logfile 2>&1
+      finish
+   fi
+}
+
+check_scratch
+
+# get todo list
+gettodo 
+
+for (( s=0 ; s < $num ; s++ ))
+do
+   check_scratch
+   night=${primaries[$s+$s]}
+   runid=${primaries[$s+$s+1]}
+   nightpath=`echo $night | cut -c 1-4`"/"`echo $night | cut -c 5-6`"/"`echo $night | cut -c 7-8`
+   lppath="/loc_data/zipraw/"$nightpath
+   nightrun=$night"_"`printf %03d $runid`
+   lpfile=`ssh fact@161.72.93.131 "find $lppath -type f -regex '.*$nightrun[.]fits[.][fg]z'"`
+   if [ "$lpfile" == "" ]
+   then
+      #echo $nightrun" not yet available on data."
+      printprocesslog "INFO "$nightrun" not yet available on data."
+      continue
+   else
+      filename=`basename $lpfile`
+   fi
+
+   setstatus "start" 
+
+   rawfilelp="/loc_data/zipraw/./"$nightpath"/"$filename
+   rawdirisdc="/scratch/from_lapalma/raw/"
+   rawfileisdc=$rawdirisdc$nightpath"/"$filename
+   ingestlinkpath="/scratch/ingest_links/raw/"$nightpath
+   makedir $ingestlinkpath >> $logfile 2>&1
+   ingestlink=$ingestlinkpath"/"$filename
+
+   if ! ssh fact@161.72.93.131 "ls $rawfilelp >/dev/null 2>&1"
+   then
+      #echo $rawfilelp" not yet available on data."
+      printprocesslog "INFO "$rawfilelp" not yet available on data."
+      check="no"
+      setstatus "stop"
+      continue
+   fi
+
+   # for new rsync
+   #   remove / from dir
+   #   change name of dir (else rsync-server doesn't accept it
+   #   test if trick with file/dir is working
+   #echo "INFO rsync rawfile "$rawfilelp" to ISDC "$rawdirisdc
+   printprocesslog "INFO rsync rawfile "$rawfilelp" to ISDC "$rawdirisdc
+   #echo "INFO rsync rawfile "$rawfilelp" to ISDC "$rawdirisdc >> $logfile 2>&1
+   #rsync -rltDvR --partial --stats --password-file=$pwfile factdata@161.72.93.131::$rawfilelp $rawdirisdc >> $logfile 2>&1
+   #echo "rsync -rltDvR -T $rsynctempdir --partial --stats fact@161.72.93.131:$rawfilelp $rawdirisdc >> $logfile 2>&1"
+   rsync -rltDvR -T $rsynctempdir --partial --stats fact@161.72.93.131:$rawfilelp $rawdirisdc >> $logfile 2>&1
+   check1=$?
+
+   case $check1 in
+      0)   printprocesslog "INFO rawfile "$rawfilelp" transferred successfully to ISDC."
+           if ! [ -L $ingestlink ]
+           then 
+              printprocesslog "INFO create link for "$rawfileisdc
+              ln -sv $rawfileisdc $ingestlink >> $logfile 2>&1
+              check2=$?
+              case $check2 in
+                 0)   printprocesslog "INFO link for  "$rawfileisdc" created successfully."
+                      ;;
+                 *)   printprocesslog "WARN link for "$rawfileisdc" could not be created (check2="$check2")."
+                      check=$check2
+                      ;;
+              esac
+           else
+              printprocesslog "INFO link "$ingestlink" already existing."
+           fi
+           ;;
+      *)   printprocesslog "CONNECTION "$rawfilelp" could not be transferred to ISDC (check1="$check1")."
+           check=$check1
+           ;;
+   esac
+   
+   query="SELECT fHasDrsFile from RunInfo WHERE fNight="$night" AND fRunID="$runid
+   numdrs=`sendquery`
+   if [ $numdrs -gt 0 ]
+   then 
+      drsfilelp="/loc_data/zipraw/./"$nightpath"/"$nightrun".drs.fits.gz"
+      drsfileisdc=$rawdirisdc$nightpath"/"$nightrun".drs.fits.gz"
+      drsingestlink=$ingestlinkpath"/"$nightrun".drs.fits.gz"
+
+      if ! ssh fact@161.72.93.131 "ls $drsfilelp >/dev/null 2>&1"
+      then
+         #echo $drsfilelp" not yet available on data."
+         printprocesslog "INFO "$drsfilelp" not yet available on data."
+         check="no"
+         setstatus "stop"
+         continue
+      fi
+
+
+      printprocesslog "INFO rsync drsfile "$drsfilelp" to ISDC "$rawdirisdc
+      echo "INFO rsync drsfile "$drsfilelp" to ISDC "$rawdirisdc >> $logfile 2>&1
+      #rsync -rltDvR --partial --stats --password-file=$pwfile factdata@161.72.93.131::$rawfilelp $rawdirisdc >> $logfile 2>&1
+      rsync -rltDvR -T $rsynctempdir --partial --stats fact@161.72.93.131:$drsfilelp $rawdirisdc >> $logfile 2>&1
+      check3=$?
+
+      case $check3 in
+         0)   printprocesslog "INFO drsfile "$drsfilelp" transferred successfully to ISDC."
+              if ! [ -L $drsingestlink ]
+              then 
+                 printprocesslog "INFO create link for "$drsfileisdc
+                 ln -sv $drsfileisdc $drsingestlink >> $logfile 2>&1
+                 check4=$?
+                 case $check4 in
+                    0)   printprocesslog "INFO link for  "$drsfileisdc" created successfully."
+                         ;;
+                    *)   printprocesslog "WARN link for "$drsfileisdc" could not be created (check4="$check4")."
+                         check=$check4
+                         ;;
+                 esac
+              else
+                 printprocesslog "INFO link "$drsingestlink" already existing."
+              fi
+              ;;
+         *)   printprocesslog "CONNECTION "$drsfilelp" could not be transferred to ISDC (check3="$check3")."
+              check=$check3
+              ;;
+      esac
+   fi
+
+   setstatus "stop" 
+done
+
+finish 
+
Index: branches/trigger_burst_research/Transfer/ZipRawData.sh
===================================================================
--- branches/trigger_burst_research/Transfer/ZipRawData.sh	(revision 18288)
+++ branches/trigger_burst_research/Transfer/ZipRawData.sh	(revision 18288)
@@ -0,0 +1,177 @@
+#!/bin/bash
+
+# this script has been written to run on La Palma on the machine data
+#   i.e. paths are only working on this machine
+
+source `dirname $0`/../Sourcefile.sh
+printprocesslog "INFO starting "$0
+
+logfile=$runlogpath"/ZipRawLP-"$datetime".log"
+date >> $logfile
+
+if ! ls /daq/raw >/dev/null 2>&1
+then 
+   printprocesslog "ERROR /daq/raw is not available."
+   finish
+fi
+
+# get dates
+if [ "$certaindate" != "" ]
+then
+   checkstring=`echo $certaindate | grep -E -o '^20[0-9][0-9]\/[01][0-9]\/[0-3][0-9]$'`
+   if [ "$checkstring" = "" ]
+   then
+      echo "Please give the variable certaindate in the correct format (YYYY/MM/DD)"
+      finish
+   fi
+   getdates $certaindate
+else
+   # get all night
+   #getdates "all"
+   # get last 3 nights if hour between 7 and 19h, else only current night
+   getdates 3 7 19
+fi
+
+# not needed anymore as ftools are not used anymore
+## setup to use ftools
+#source $HEADAS/headas-init.sh
+
+# files younger than $delaytime are not processed
+delaytime=5
+
+# do rsync for rawfiles of these dates
+for date in ${dates[@]}
+do 
+   echo "" >> $logfile 2>&1
+   rawdir=/daq/raw/$date
+   echo `date`": processing files in "$rawdir >> $logfile 2>&1
+   printprocesslog "INFO processing files in "$rawdir
+   #echo "INFO processing files in "$rawdir
+   # check if data are available from that night
+   if ! [ -d $rawdir ]
+   then
+      echo `date`": no data available in "$rawdir >> $logfile 2>&1
+      printprocesslog "INFO no data available in "$rawdir
+      continue
+   fi
+
+   # find all fits-files starting with the oldest file
+   echo `date`": finding files to be zipped in $rawdir..." >> $logfile 2>&1
+   printprocesslog "INFO finding files to be zipped in "$rawdir"..."
+   fitsfiles=`find $rawdir -type f -regex '.*[.]fits[.]?[g]?[f]?[z]?' | sort `
+
+   if [ "$fitsfiles" = "" ] # fitsfiles is a scalar string, not an array: ${#fitsfiles[@]} was 1 even for empty find output, so this check never fired
+   then
+      echo `date`": no files to be zipped in $rawdir..." >> $logfile 2>&1
+      printprocesslog "INFO no files to be zipped in "$rawdir"..."
+      continue
+   fi
+
+   zipdir=/loc_data/zipraw/$date
+   # create output directory for zip
+   makedir $zipdir >> $logfile 2>&1
+
+   # loop to zip files
+   echo `date`": zipping files in $rawdir..." >> $logfile 2>&1
+   printprocesslog "INFO zipping files in "$rawdir"..."
+   for file in $fitsfiles
+   do
+      # check if more than ~ 10 GB are left on /loc_data
+      diskusage=( `df -P /loc_data | grep /loc_data ` )
+      if [ ${diskusage[3]} -lt 10000000 ] 
+      then
+         echo "WARN less than 10 GB left on /loc_data on data ("${diskusage[3]}")" >> $logfile 2>&1
+         printprocesslog "WARN less than 10 GB left on /loc_data on data ("${diskusage[3]}")"
+         finish
+      fi
+      
+      # check if raw file was accessed in the last $delaytime minutes
+      isnew=`find $file -amin -$delaytime`
+      if [ "$isnew" != "" ]
+      then
+         echo $file" is not older than $delaytime min => continue" >> $logfile 2>&1
+         printprocesslog "INFO "$file" is not older than $delaytime min => continue"
+         continue
+      fi
+
+      iszipped=`echo $file | grep -o fz`
+      if [ "$iszipped" == "fz" ]
+      then
+         # treat already compressed files
+         printprocesslog "DEBUG treat compressed file "$file
+
+         # filename for file on data
+         zipfile=`echo $file | sed -e 's/daq/loc_data/g' -e 's/raw/zipraw/' `
+         zipfiletmp=`echo $file | sed -e 's/daq/loc_data/g' -e 's/raw/zipraw/' -e 's/fits.fz/fits.fz.tmp/'`
+         # check if file on data already exists
+         if [ -e $zipfile ]
+         then
+            continue
+         fi
+
+         echo `date`": copying "$file" to "$zipfile" ..." >> $logfile 2>&1
+         printprocesslog "INFO copying "$file" to "$zipfile" ..."
+         #echo "INFO zipping "$file" to "$zipfile" ..."
+         # read setup again to allow for updates of variables defining transfer
+         source `dirname $0`/../Sourcefile.sh
+         # zip file to stdout and pipe it to outputfile
+         echo "pv --rate-limit $limitpigz $file > $zipfiletmp" >> $logfile 2>&1
+         pv --rate-limit $limitpigz $file > $zipfiletmp
+         statuspv=$?
+         printprocesslog "DEBUG pvstatus:"$statuspv
+         echo " pvstatus:"$statuspv >> $logfile 2>&1
+         if [ $statuspv -eq 0 ] 
+         then
+            # if successful, move temporary to final zipfile
+            printprocesslog "INFO move "$zipfiletmp" to "$zipfile" ..."
+            mv -v $zipfiletmp $zipfile >> $logfile 2>&1
+         else
+            # if not successful, remove temporary zipfile
+            printprocesslog "WARN pv failed (statuspv:"$statuspv")."
+            echo "WARN pv failed (PIPESTATUS:"$statuspv")." >> $logfile 2>&1
+            printprocesslog "INFO remove "$zipfiletmp"..."
+            rm -v $zipfiletmp >> $logfile 2>&1
+            finish
+         fi
+      else
+         # treat uncompressed files
+         printprocesslog "DEBUG treat uncompressed file "$file
+
+         # filename for temporary and final zipfile
+         zipfile=`echo $file | sed -e 's/daq/loc_data/g' -e 's/raw/zipraw/' -e 's/fits/fits.gz/'`
+         zipfiletmp=`echo $file | sed -e 's/daq/loc_data/g' -e 's/raw/zipraw/' -e 's/fits/fits.tmp.gz/'`
+         # check if zipped file already exists
+         if [ -e $zipfile ]
+         then
+            continue
+         fi
+
+         echo `date`": zipping "$file" to "$zipfile" ..." >> $logfile 2>&1
+         printprocesslog "INFO zipping "$file" to "$zipfile" ..."
+         #echo "INFO zipping "$file" to "$zipfile" ..."
+         # read setup again to allow for updates of variables defining transfer
+         source `dirname $0`/../Sourcefile.sh
+         # zip file to stdout and pipe it to outputfile
+         echo "pv --rate-limit $limitpigz $file | pigz -1 -c -f -p $numprocpigz > $zipfiletmp" >> $logfile 2>&1
+         pv --rate-limit $limitpigz $file | pigz -1 -c -f -p $numprocpigz > $zipfiletmp
+         statuspigz=( `echo ${PIPESTATUS[@]}` )
+         printprocesslog "DEBUG PIPESTATUS:"${statuspigz[@]}
+         echo " PIPESTATUS:"${statuspigz[@]} >> $logfile 2>&1
+         if [ ${statuspigz[0]} -eq 0 ] && [ ${statuspigz[1]} -eq 0 ]
+         then
+            # if successful, move temporary to final zipfile
+            printprocesslog "INFO move "$zipfiletmp" to "$zipfile" ..."
+            mv -v $zipfiletmp $zipfile >> $logfile 2>&1
+         else
+            # if not successful, remove temporary zipfile
+            printprocesslog "WARN pigz or pv failed (PIPESTATUS:"${statuspigz[@]}")."
+            echo "WARN pigz or pv failed (PIPESTATUS:"${statuspigz[@]}")." >> $logfile 2>&1
+            printprocesslog "INFO remove "$zipfiletmp"..."
+            rm -v $zipfiletmp >> $logfile 2>&1
+            finish
+         fi
+      fi
+   done
+done
+echo "finished zipping..." >> $logfile 2>&1
+finish
