#!/bin/bash
#
# This is a resource file for the scripts, in which paths, variables
# and setups are defined.
#
# This setup file is for the machine "data" in La Palma.
# It is also used for the other machines which share the home directory of "data".
#

# setup to use ftools
export HEADAS=/opt/heasoft-6.11/x86_64-unknown-linux-gnu-libc2.13-0/
export HEADASPROMPT=/dev/null

# setup to use ROOT
root=/opt/root_v5.34.10/bin/thisroot.sh
source $root

# software versions
export factpath=/home/fact/operation   # path where the programs of FACT++ are linked
export mars=/home/fact/SW.automatic.processing/Mars.svn.2014.05.26

# site
processingsite=lp
sitekey=3
storagesite=wue

# logging and setup
logpath=/home/fact/logs.automatic.processing/autologs
lockpath=/home/fact/logs.automatic.processing/locks
#setuppath=/magic/simulated/setup

rsynctempdir=/loc_data/rsync_tmp
if ! [ -d $rsynctempdir ]
then
   mkdir $rsynctempdir
fi

# data paths
anapath=/loc_data/analysis      # on daq
#anapath=/newdaq/analysis_bu    # temporarily to newdaq
drstimepath=$anapath/drs_time_calib
auxdata=/loc_data/aux
auxdata_for_sed=$(printf "%s\n" "$auxdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
rawdata=/loc_data/raw
rawdata_for_sed=$(printf "%s\n" "$rawdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')
ziprawdata=/loc_data/zipraw
ziprawdata_for_sed=$(printf "%s\n" "$ziprawdata" | sed 's/[][\.*^$(){}?+|/]/\\&/g')

# paths for Mars, macros and scripts
# in case the variable is not yet set
macrospath=$mars/datacenter/macros
scriptspath=$mars/datacenter/scripts

# rcfiles
# dependencies of the steps
steps=$mars/resources/steps_fact.rc
# file with db information
sqlrc=$mars/sql.rc

# addresses to which information about full disks is sent
deladrs="shift@fact-project.org"
# addresses to which the errors are sent
erradrs="dorner@astro.uni-wuerzburg.de"
# addresses to which the changes are sent
adrs="dorner@astro.uni-wuerzburg.de"

#
# setup QLA
#

# setup for Step1.sh
# disklimitdaq: transfer and qla do not start when less than this is available on /daq
# (with more than 90% disk usage the data taking is affected)
disklimitdaq=800000000
# bandwidth limit for the transfer from newdaq to daq
bwlimit=90000
# number of RunCallisto.sh started on daq
#numruncallistos=20
numruncallistos=8

# setup for RunCallisto.sh
# number of callisto.C running on daq (otherwise RunCallisto.sh sleeps)
#numcallistos=8
numcallistos=5
# seconds to wait before checking the number of callistos again
callistowait=60

# setup for ZipRawData.sh
# number of processes used for pigz
numprocpigz=3
# bandwidth limit for the transfer from daq to data
limitpigz="90M"

# setup for FillNumEvts.sh and Step3.sh
resulttable1="AnalysisResultsRunLP"
resulttable2="AnalysisResultsNightLP"
firstnight=20121213
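
#
# Usage sketch (illustrative only, kept entirely in comments so that sourcing
# this file stays side-effect free): a processing script in $scriptspath would
# typically source this resource file and then use the variables defined above.
# The file name "setup.data.lp.rc" and the file list "filelist.txt" below are
# assumptions for the example, not part of this setup.
#
#   source `dirname $0`/../setup.data.lp.rc        # assumed location of this file
#   echo "processing at site $processingsite (key $sitekey), storage at $storagesite"
#   # the *_for_sed variables hold the paths with sed metacharacters escaped,
#   # so they can be used safely inside a sed expression, e.g. to select
#   # entries below $rawdata from a (hypothetical) file list:
#   sed -n "/^$rawdata_for_sed\//p" filelist.txt
#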