Timestamp: 06/28/18 13:05:39 (6 years ago)
Author: Daniela Dorner
Message: added readme
File: 1 edited

Legend:
  (no prefix)  unmodified
  +            added
  -            removed
  • trunk/DataCheck/Tools/get_data.sh

--- r19031
+++ r19032
@@ -1 +1 @@
 #!/bin/bash
 
-# todo
+# ----------------------------------------------------------------
+#  README README README README README README README README README
+# ----------------------------------------------------------------
+#                                                                #
+# To use this script, you need                                   #
+#   - a computer with access to the FACT database in La Palma    #
+#   - a file with the password of a valid mysql-user             #
+#   - to define the setup below for                              #
+#     a) the DB access                                           #
+#     b) the data you want to have                               #
+#                                                                #
+# To define the setup, search for SETUP in this script and       #
+#   read the details there (starting roughly line 295)           #
+#                                                                #
+# Per data request, you get up to 3 files:                       #
+#   *_internal.dat                                               #
+#   *_collaborators.dat                                          #
+#   *_external.dat (only if binning is 20min or nightly)         #
+#                                                                #
+# Please keep in mind that this started as a tool for myself     #
+#  and was then picked up by others. The script is not yet       #
+#  finalized. If you find problems or have a feature request,    #
+#  please send an email to dorner@astro.uni-wuerzburg.de         #
+#                                                                #
+# ----------------------------------------------------------------
+#  README README README README README README README README README
+# ----------------------------------------------------------------
+
+
+
+
+
+# ToDo (notes DD):
+# ----------------
+# - add file for collaborators
+# - update columns and content for 3 types of files
+# - limit creation of file for externals to
 # - update function for correction
 # - update CU for QLA
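
The README above presumes a file holding the MySQL password. As a rough sketch (not part of the changeset), such a file could simply contain the password and nothing else and be kept readable only by its owner; the password string below is of course a placeholder:

# sketch: prepare the password file the script reads via its sqlpw setting
echo 'my-db-password' > /home/$USER/.mysql.pw   # placeholder password
chmod 600 /home/$USER/.mysql.pw                 # keep it private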
     
@@ -9 +45 @@
 # - check crab flux
 # - add E2dNdE?
+# - functionality to determine start time for season-binning
+# - can get_data.sh / Send_Data*.sh be combined?
+#   get_data.sh should be able to run stand-alone and be kept simple for any user
+
+#
+# content of files (wish list):
+# -----------------------------
+# REMARK: keep order of columns to allow reading with TGraph directly from the file: X Y EX EY
+#
+# internal
+# --------
+# time: time, delta time, start, stop, ontime
+# flux: excrate, excerr, corrate, corerr, CU, CUerr, flux, fluxerr
+# other info on flux: signif, cu-factor, num exc, num sig, num bg
+# other info: zd, th, R750cor, R750ref
+#
+# external (allow only 20min and nightly binning)
+# --------
+# time: time, delta time, start, stop
+# flux: excrate, excerr
+#
+# collaborators
+# -------------
+# time: time, delta time, start, stop, ontime
+# flux: excrate, excerr, corrate, corerr, flux, fluxerr, significance
+#
+# additional information to include:
+# ----------------------------------
+# timestamp of creation
+# query (for debugging / answering questions)
+# policy (adapted for internal/collaborators/external) [define in files to be used also by Send_Data*.sh]
+#
+
+
 
 function get_results()
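
The REMARK in the wish list above is the reason for the X Y EX EY column order: ROOT can build a graph straight from such a file. A minimal sketch of how a produced .dat file might be read back (the file name is a made-up example following the name/_external.dat pattern, and the format string has to be adapted to the actual column layout of the file):

# sketch: build a TGraphErrors from a whitespace-separated X Y EX EY file with ROOT
cat > /tmp/readflux.C << 'EOF'
void readflux()
{
   // format string: which columns to read as X, Y, EX, EY
   TGraphErrors g("Mrk501_2014_QLA_external.dat", "%lg %lg %lg %lg");
   g.Print();
}
EOF
root -l -b -q /tmp/readflux.C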
     
@@ -33 +103 @@
    where=$where" "$dch
 
-   cufactor=" Avg(25.2) "
+   #
+   cufactor=" Avg(CUQLA(fNight)) "
    crabflux="3.37e-11"
    fluxprec=13
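
Judging from the later assignment fluxerr2="$cuerr2*"$crabflux, the flux columns are obtained by multiplying the Crab-unit values by crabflux. A tiny illustration of that arithmetic (the CU value is made up):

# illustration only: convert a Crab-unit value to a flux using the crabflux factor above
crabflux="3.37e-11"
cu=0.5        # made-up Crab-unit value
awk -v cu="$cu" -v cf="$crabflux" 'BEGIN { printf "%.3e\n", cu * cf }'
# prints 1.685e-11 (in whatever units crabflux carries)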
     
@@ -108 +179 @@
    fluxerr2="$cuerr2*"$crabflux
 
-# internal
-# --------
-# timeselect:
-#  mjdstar, mjdstop, mjdmean, ontime
-# excselect:
-#  excrate, excerr
-#  corrected: excrate, excerr
-#  CU CUerr
-#  flux, fluxerr
-# addselect:
-#  signif
-#  num exc, num sig, num bg
-# other info: zd? th?
-#
-#
-# external
-# --------
-# time, delta time, start, stop
-# corr-excrate, corr-excerr
-# flux, flux-err
 
    if [ $bin -le 0 ]
     
@@ -242 +293 @@
 }
 
-# setup
-# db
-sqlpw=/home/$USER/.mysql.pw # file with mysql credentials
-#host=lp-fact
-host=10.0.100.21
-#host=localhost
-dbname=factdata # name of database
+# SETUP:
+# ------
+# DB SETUP:
+# ---------
+# path to file with mysql password
+sqlpw=/home/$USER/.mysql.pw
+# host of mysql server with FACT DB
+#host=lp-fact # ISDC
+host=10.0.100.21 # LP or LP via vpn
+#host=localhost # your local machine in case you have a copy of the DB
+# name of database
+dbname=factdata
 # defaults for zd and threshold
 zdmax=90 # all data
 thmax=1500 # all data
+#
+# SETUP for your data:
+# --------------------
 # output path
 path=`dirname $0`
 datapath=$path"/data"
+# create directory for data files
 if ! [ -e $datapath ]
 then
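
The DB settings above (sqlpw, host, dbname) are everything the mysql client needs apart from a user name. A rough sketch of how they might be wired into a query; the user name 'factread' and the query itself are placeholders for illustration, not taken from the script:

# sketch: run a query against the FACT DB with the settings defined above
sqlpw=/home/$USER/.mysql.pw
host=10.0.100.21
dbname=factdata
query="SELECT COUNT(*) FROM AnalysisResultsRunISDC"   # placeholder query
mysql --host=$host --user=factread --password="$(cat $sqlpw)" \
      --batch --skip-column-names --execute="$query" $dbname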
     
@@ -264 +324 @@
 timeunit=mjd
 # time binning
-# positive values: minutes
-# negative values: days
-# special case 0: period
-# for season binning choose -365 and according start date
+#  positive values: minutes
+#  negative values: days
+#  special case 0: period
+#  for season binning, choose -365 and a matching start date
 #bin=20 # minutes
 #bin=0 # period
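
The binning convention above (positive values are minutes, negative values are days, 0 means period binning, and -365 together with a suitable start date gives season binning) can be spelled out in a small helper; this is purely illustrative and not part of the script:

# illustration only: translate a bin value into the convention described above
describe_bin() {
   local bin=$1
   if [ "$bin" -gt 0 ]; then
      echo "${bin}-minute bins"
   elif [ "$bin" -eq 0 ]; then
      echo "binning by period"
   elif [ "$bin" -eq -365 ]; then
      echo "season binning (choose the start date accordingly)"
   else
      echo "$(( -bin ))-day bins"
   fi
}
describe_bin 20   # prints "20-minute bins"
describe_bin -1   # prints "1-day bins", i.e. nightly binning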
     
@@ -288 +348 @@
 # 501 MAGIC
 source=2
-name="Mrk501_2014JulAug"
-bin=-1
-nightmin=20140714
-nightmax=20140805
+name="Mrk501_2014_QLA"
+bin=-1
+nightmin=20140501
+nightmax=20140930
+get_results
+table="AnalysisResultsRunISDC"  # ISDC
+name="Mrk501_2014_ISDC"
 get_results
 
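
The hunk above shows the request pattern: set source, name, bin, nightmin and nightmax, then call get_results; a second call with a different table and name reuses the same selection. A further request would follow the same lines; everything in this sketch is made up and not part of the changeset:

# made-up example of an additional request block (name and night range are placeholders)
source=2                 # same source id as the Mrk 501 block above
name="Mrk501_2015_QLA"
bin=20                   # 20-minute binning
nightmin=20150601
nightmax=20150630
get_results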