Index: trunk/DataCheck/Tools/get_data.sh
===================================================================
--- trunk/DataCheck/Tools/get_data.sh	(revision 19031)
+++ trunk/DataCheck/Tools/get_data.sh	(revision 19032)
@@ -1,5 +1,41 @@
 #!/bin/bash
 
-# todo
+# ----------------------------------------------------------------
+#  README README README README README README README README README
+# ----------------------------------------------------------------
+#                                                                #
+# To use this script, you need                                   #
+#   - a computer with access to the FACT database in La Palma    #
+#   - a file with the password of a valid mysql-user             #
+#   - to define the setup below for                              #
+#     a) the DB access                                           #
+#     b) the data you want to have                               #
+#                                                                #
+# To define the setup, search for SETUP in this script and       #
+#   read the details there (starting roughly line 295)           #
+#                                                                #
+# Per data request, you get up to 3 files:                       #
+#   *_internal.dat                                               #
+#   *_collaborators.dat                                          #
+#   *_external.dat (only if binning is 20min or nightly)         #
+#                                                                #
+# Please keep in mind that this started as a tool for myself,    #
+#  then others started using it. Also the script is not yet      #
+#  finalized. In case you find problems and/or have a feature    #
+#  request, please send an email to dorner@astro.uni-wuerzburg.de #
+#                                                                #
+# ----------------------------------------------------------------
+#  README README README README README README README README README
+# ----------------------------------------------------------------
+
+
+
+
+
+# ToDo (notes DD): 
+# ----------------
+# - add file for collaborators
+# - update columns and content for 3 types of files
+# - limit creation of file for externals to 20min and nightly binning
 # - update function for correction
 # - update CU for QLA
@@ -9,4 +45,38 @@
 # - check crab flux
 # - add E2dNdE?
+# - functionality to determine start time for season-binning
+# - can get_data.sh / Send_Data*.sh be combined? 
+#   get_data.sh should be able to run stand-alone and be kept simple for any user
+
+#
+# content of files (wish list):
+# -----------------------------
+# REMARK: keep order of columns to allow for reading with TGraph directly from file: X Y EX EY
+# 
+# internal 
+# --------
+# time: time, delta time, start, stop, ontime
+# flux: excrate, excerr, corrate, corerr, CU CUerr, flux, fluxerr, 
+# other info on flux: signif, cu-factor, num exc, num sig, num bg
+# other info: zd th R750cor R750ref
+# 
+# external (allow only 20min and nightly binning)
+# --------
+# time: time, delta time, start, stop
+# flux: excrate, excerr
+#
+# collaborators
+# -------------
+# time: time, delta time, start, stop, ontime
+# flux: excrate, excerr, corrate, corerr, flux, flux-err, significance
+# 
+# additional information to put:
+# ------------------------------
+# timestamp of creation
+# query (for debugging / answering questions) 
+# policy (adapted for internal/collaborators/external) [define in files to be used also by Send_Data*.sh]
+# 
+
+
 
 function get_results()
@@ -33,5 +103,6 @@
    where=$where" "$dch
    
-   cufactor=" Avg(25.2) "
+   # per-night Crab-unit factor from DB function CUQLA (replaces fixed 25.2)
+   cufactor=" Avg(CUQLA(fNight)) "
    crabflux="3.37e-11"
    fluxprec=13
@@ -108,24 +179,4 @@
    fluxerr2="$cuerr2*"$crabflux
 
-# internal 
-# --------
-# timeselect:
-#  mjdstar, mjdstop, mjdmean, ontime
-# excselect:
-#  excrate, excerr
-#  corrected: excrate, excerr
-#  CU CUerr
-#  flux, fluxerr
-# addselect: 
-#  signif
-#  num exc, num sig, num bg
-# other info: zd? th?
-# 
-# 
-# external
-# --------
-# time, delta time, start, stop
-# corr-excrate, corr-excerr
-# flux, flux-err
    
    if [ $bin -le 0 ]
@@ -242,17 +293,26 @@
 }
 
-# setup
-# db
-sqlpw=/home/$USER/.mysql.pw # file with mysql credentials
-#host=lp-fact
-host=10.0.100.21
-#host=localhost
-dbname=factdata # name of database
+# SETUP:
+# ------
+# DB SETUP:
+# ---------
+# path to file with mysql password
+sqlpw=/home/$USER/.mysql.pw 
+# host of mysql server with FACT DB
+#host=lp-fact # ISDC
+host=10.0.100.21 # LP or LP via vpn
+#host=localhost # your local machine in case you have a copy of DB
+# name of database
+dbname=factdata 
 # defaults for zd and threshold
 zdmax=90 # all data
 thmax=1500 # all data
+#
+# SETUP for your data: 
+# --------------------
 # output path
 path=`dirname $0`
 datapath=$path"/data"
+# create directory for data files 
 if ! [ -e $datapath ]
 then
@@ -264,8 +324,8 @@
 timeunit=mjd
 # time binning 
-# positive values: minutes
-# negative values: days
-# special case 0: period
-# for season binning choose -365 and according start date
+#  positive values: minutes
+#  negative values: days
+#  special case 0: period
+#  for season binning choose -365 and according start date
 #bin=20 # minutes
 #bin=0 # period
@@ -288,8 +348,11 @@
 # 501 MAGIC 
 source=2
-name="Mrk501_2014JulAug"
-bin=-1
-nightmin=20140714
-nightmax=20140805
+name="Mrk501_2014_QLA"
+bin=-1
+nightmin=20140501
+nightmax=20140930
+get_results
+table="AnalysisResultsRunISDC"  # ISDC
+name="Mrk501_2014_ISDC"
 get_results
 
