Index: /trunk/DataCheck/Cron/crontab.newdata
===================================================================
--- /trunk/DataCheck/Cron/crontab.newdata	(revision 18861)
+++ /trunk/DataCheck/Cron/crontab.newdata	(revision 18861)
@@ -0,0 +1,51 @@
+MAILTO=dorner@astro.uni-wuerzburg.de
+AUTOMATIONSETUP="fact.lp.data"
+# cronjobs for newdata
+# check network status (currently disabled)
+#*/15 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNetworkStatus.sh
+# check time offset of machine
+0 */6 * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckNTPDate.sh
+# clean-up /scratch (make sure that Step1.sh for the previous night has finished before this runs)
+10 18 * * * /home/fact/SW.automatic.processing/DataCheck/Transfer/CleanupNewdata.sh
+# rsync auxfiles from newdaq via daq to data
+*/15 * * * * if ! ps aux | grep RsyncAuxLP | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Transfer/RsyncAuxLP.sh ; fi
+# first step of the QLA: rsync newdaq->daq, callisto, star, starting also ganymed
+#   (make sure that it is running for previous night only before cleaning /scratch)
+*/5 19-23,0-7 * * * if ! ps aux | grep Step1 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step1.sh ; fi
+0 8-17 * * * if ! ps aux | grep Step1 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step1.sh ; fi
+# compress data
+#   (make sure that it is running for previous night only before cleaning /scratch)
+*/5 19-23,0-7 * * * if ! ps aux | grep ZipRawData | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Transfer/ZipRawData.sh ; fi
+16 8-17 * * * if ! ps aux | grep ZipRawData | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Transfer/ZipRawData.sh ; fi
+# third step of the QLA: creating lightcurves for the webpage
+2-59/5 19-23,0-7 * * * if ! ps aux | grep Step3 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step3.sh run ; fi
+0 8-18 * * * if ! ps aux | grep Step3 | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/Step3.sh run ; fi
+# check for flares
+*/5 19-23,0-7 * * * if ! ps aux | grep FlareAlerts | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/QuickLook/FlareAlerts.sh ; fi
+# create quality plots
+5 * * * * /home/fact/SW.automatic.processing/DataCheck/DataCheck/PlotQuality.sh
+# fill ratescans to database and plot them for website
+10 * * * * /home/fact/SW.automatic.processing/DataCheck/Processing/FillRatescans.sh
+15 * * * * /home/fact/SW.automatic.processing/DataCheck/DataCheck/PlotRatescans.sh
+# backup of the QLA results to newdaq
+0 8,10,12 * * * /home/fact/SW.automatic.processing/DataCheck/Transfer/BackupQLA.sh
+# fetch images every 5 minutes during the night, for later data checks
+# lidcam
+*/5 18-23,0-8 * * * mkdir -p /data1/analysis/lidcam_images/`date -u +\%Y/\%m/\%d` && wget -q -O /data1/analysis/lidcam_images/`date -u +\%Y/\%m/\%d`/lidcamimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg https://www.fact-project.org/cam/lidcam.php
+# fact skycam
+*/5 18-23,0-8 * * * mkdir -p /data1/analysis/skycam_images/`date -u +\%Y/\%m/\%d` && wget -q -O /data1/analysis/skycam_images/`date -u +\%Y/\%m/\%d`/skycamimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg https://www.fact-project.org/cam/skycam.php
+# concam 
+*/5 18-23,0-8 * * *  mkdir -p /data1/analysis/concam_images/`date -u +\%Y/\%m/\%d` && wget -q -O /data1/analysis/concam_images/`date -u +\%Y/\%m/\%d`/concam_allskyimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg http://catserver.ing.iac.es/weather/archive/concam/concam_labels.png
+# gtc allsky image 
+*/5 18-23,0-8 * * *  mkdir -p /data1/analysis/gtc_images/`date -u +\%Y/\%m/\%d` && wget -q -O /data1/analysis/gtc_images/`date -u +\%Y/\%m/\%d`/gtc_allskyimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg http://www.gtc.iac.es/multimedia/netcam/camaraAllSky.jpg
+# magic allsky image 
+*/5 18-23,0-8 * * *  mkdir -p /data1/analysis/magic_images/`date -u +\%Y/\%m/\%d` && wget -q -O /data1/analysis/magic_images/`date -u +\%Y/\%m/\%d`/magic_allskyimage_`date -u +\%Y\%m\%d_\%H\%M\%S`.jpg http://www.magic.iac.es/site/weather/AllSkyCurrentImage.JPG
+# fill moon information
+42 * * * * if ! ps aux | grep FillMoonInfo | grep -v grep >/dev/null 2>&1  ; then /home/fact/SW.automatic.processing/DataCheck/Processing/FillMoonInfo.sh ; fi
+# check disk usage 
+50 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckDU.sh
+# check global log 
+#   once per day for ERROR and WARN
+10 0 * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckLogs.sh day
+#   once per hour for CONNECTION and DISK
+10 * * * * /home/fact/SW.automatic.processing/DataCheck/Monitoring/CheckLogs.sh hour
