source: trunk/DataCheck/Tools/get_data.sh@ 19033

Last change on this file since 19033 was 19033, checked in by Daniela Dorner, 8 years ago
added tutorial, added comments, restructured query functions, added file for collaborators, reworked columns for 3 files, added zd and th for internal
  • Property svn:executable set to *
File size: 20.4 KB
Line 
1#!/bin/bash
2
3# ---------------------------------------------------------------- #
4# README README README README README README README README README #
5# ---------------------------------------------------------------- #
6# #
7# To use this script, you need #
8# - a computer with access to the FACT database in La Palma #
9# - a file with the password of a valid mysql-user #
10# - to define the setup below for #
11# a) the DB access #
12# b) the data you want to have #
13# #
14# To define the setup, search for SETUP in this script and #
15# read the details there #
16# #
17# Per data request, you get up to 3 files: #
18# *_internal.dat #
19# *_collaborators.dat #
20# *_external.dat (only if binning is 20min or nightly) #
21# #
22# Please have in mind that this started as a tool for myself, then #
23# others started using it. Also the script is not yet finalized. #
24# In case you find problems and/or have a feature request, please #
# send an email to dorner@astro.uni-wuerzburg.de                  #
26# #
27# ---------------------------------------------------------------- #
28# README README README README README README README README README #
29# ---------------------------------------------------------------- #
30
31
32
33
34
35# ToDo (notes DD):
36# ----------------
37# - limit creation of file for externals to nightly and 20 min
38# - add zd, th for internal for minute binning (daily done)
39# - update function for correction
40# - update CU for QLA
41# - add CU for ISDC analysis
42# - add < 20121212 data for QLA
43# - check crab flux
44# - add E2dNdE?
# - functionality to determine start time for season-binning
46# - can get_data.sh / Send_Data*.sh be combined?
47# get_data.sh should be able to run stand-alone and be kept simple for any user
48
49#
50# content of files (wish list):
51# -----------------------------
52# REMARK: keep order of columns to allow for reading with TGraph directly from file: X Y EX EY
53#
54# internal
55# --------
56# time: time, delta time, start, stop, ontime
57# flux: excrate, excerr, corrate, corerr, CU CUerr, flux, fluxerr,
58# other info on flux: signif, cu-factor, num exc, num sig, num bg
59# other info: zd th R750cor R750ref
60#
61# external (allow only 20min and nightly binning)
62# --------
63# time: time, delta time, start, stop
64# flux: excrate, excerr
65#
66# collaborators
67# -------------
68# time: time, delta time, start, stop, ontime
69# flux: excrate, excerr, corrate, corerr, flux, flux-err, significance
70#
71# additional information to put:
72# ------------------------------
73# timestamp of creation
74# query (for debugging / answering questions)
# policy (adapted for internal/collaborators/external) [define in files to be used also by Send_Data*.sh]
76#
77
78
79
function get_results()
{
    # ------------------------------------------------------------------ #
    # Build and run the mysql queries for one data request and append    #
    # the results to up to three output files:                           #
    #   *_internal.dat, *_collaborators.dat, *_external.dat              #
    # Reads globals set in the SETUP sections below:                     #
    #   DB access:  sqlpw, host, dbname                                  #
    #   selection:  table, source, nightmin, nightmax, zdmax, thmax     #
    #   binning:    bin, timeunit, ontimelimit                           #
    #   output:     datapath, name, overwrite                            #
    # ------------------------------------------------------------------ #

    # some basic query parts

    # data check based on artificial trigger rate
    #dch=" AND fR750Cor/fR750Ref >0.93 "
    dch=" AND fR750Cor/fR750Ref BETWEEN 0.93 AND 1.3 "
    # ontime: effective on-time scaled from the run duration if available,
    # otherwise on-time after cuts
    ontime1=" TIME_TO_SEC(TIMEDIFF(fRunStop,fRunStart))*fEffectiveOn "
    ontime2=" fOnTimeAfterCuts "
    # NOTE: ontime2 is re-used further down as a SELECT column, so
    # ontimeif has to be built before that happens
    ontimeif=" IF(ISNULL(fEffectiveOn), "$ontime2", "$ontime1") "
    from=" FROM RunInfo LEFT JOIN "$table" USING (fNight, fRunID) "
    # time range and source
    where=" WHERE fSourceKey="$source" AND fNight BETWEEN "$nightmin" AND "$nightmax
    where=$where" AND NOT ISNULL(fNumExcEvts) "
    # some sanity checks
    where=$where" AND fRunTypeKey=1 "
    # zd cut
    where=$where" AND fZenithDistanceMax < "$zdmax
    # th cut
    where=$where" AND fThresholdMedian < "$thmax
    where=$where" "$dch

    # conversion Crab units -> flux
    cufactor=" Avg(CUQLA(fNight)) "
    # the first pair of values is deliberately overridden: fluxes are
    # written in units of 1e-11/cm2/s (see the file headers below)
    crabflux="3.37e-11"
    fluxprec=13
    crabflux="3.37"
    fluxprec=2

    # time columns: the plain variables query RunInfo directly
    # (period / N-day binning), the *2 variables query the
    # subselect alias 'o' (minute binning)
    case $timeunit in
        mjd)  delta="(Mjd(MAX(fRunStop))-Mjd(Min(fRunStart)))/2"
              start=" Mjd(Min(fRunStart)) AS start"
              stop=" Mjd(MAX(fRunStop)) AS stop"
              deltat=$delta" AS deltat"
              time=" Mjd(Min(fRunStart))+"$delta" AS time"
              delta2="(Mjd(MAX(o.stop))-Mjd(MIN(o.start)))/2"
              start2=" Mjd(MIN(o.start)) AS start"
              stop2=" Mjd(MAX(o.stop)) AS stop"
              deltat2=$delta2" AS deltat"
              time2=" Mjd(MIN(o.start))+"$delta2" AS time"
              ;;
        unix) delta="(Unix_timestamp(CONVERT_TZ(Max(fRunStop), '+00:00', 'SYSTEM')) - Unix_timestamp(CONVERT_TZ(Min(fRunStart), '+00:00', 'SYSTEM')))/2"
              start="Unix_timestamp(CONVERT_TZ(Min(fRunStart), '+00:00', 'SYSTEM')) AS start"
              stop="Unix_timestamp(CONVERT_TZ(Max(fRunStop), '+00:00', 'SYSTEM')) AS stop"
              deltat=$delta" AS deltat"
              time=" Unix_timestamp(CONVERT_TZ(Min(fRunStart), '+00:00', 'SYSTEM'))+"$delta" AS time"
              delta2="(Unix_timestamp(CONVERT_TZ(Max(o.stop), '+00:00', 'SYSTEM')) - Unix_timestamp(CONVERT_TZ(Min(o.start), '+00:00', 'SYSTEM')))/2"
              # FIX: this branch used to assign to a (misspelled)
              # variable 'startstop2' without the separating comma,
              # leaving start2/stop2 empty and producing a broken
              # query for timeunit=unix with minute binning
              start2=" Unix_timestamp(CONVERT_TZ(MIN(o.start), '+00:00', 'SYSTEM')) AS start"
              stop2=" Unix_timestamp(CONVERT_TZ(MAX(o.stop), '+00:00', 'SYSTEM')) AS stop"
              deltat2=$delta2" AS deltat"
              time2=" Unix_timestamp(CONVERT_TZ(Min(o.start), '+00:00', 'SYSTEM'))+"$delta2" AS time"
              ;;
        *)    delta="sec_to_time(time_to_sec(timediff(MAX(fRunStop), Min(fRunStart)))/2)"
              start=" MIN(fRunStart) AS start"
              stop=" MAX(fRunStop) AS stop"
              deltat=$delta" AS deltat"
              time=" addtime(Min(fRunStart), "$delta") AS time"
              delta2="sec_to_time(time_to_sec(timediff(MAX(o.stop), Min(o.start)))/2)"
              start2=" MIN(o.start) AS start"
              stop2=" MAX(o.stop) AS stop"
              # FIX: used to be $delta (half duration of the whole
              # sample) instead of $delta2 (half duration of the bin)
              deltat2=$delta2" AS deltat"
              time2=" addtime(Min(o.start), "$delta2") AS time"
              ;;
    esac
    zenith="fZenithDistance"
    thresh="IF(ISNULL(fThresholdMinSet),fThresholdMedian,fThresholdMinSet)"
    # thomas correction factor (zd and threshold dependence of the excess)
    correvts=" fNumExcEvts*(pow(cos("$zenith"Mean*PI()/180),3)+14.8/21.9*pow(sin(2*"$zenith"Mean*PI()/180),5))/((1-0.00124/1.21*("$thresh"-500)*("$thresh">=500))) "
    excerr="ExcErr(Sum(fNumSigEvts), SUM(fNumBgEvts))"
    CU="SUM("$correvts"/CUQLA(fNight))/SUM("$ontimeif")*3600"
    CUerr=$excerr"/SUM("$ontimeif")*3600*SUM("$correvts"/CUQLA(fNight))/SUM(fNumExcEvts)"
    excerr2="ExcErr(SUM(o.sigevts),SUM(o.bgevts))"
    CU2="SUM(o.corevts/o.cu)/SUM(o.ot)*3600"
    CUerr2=$excerr2"/SUM(o.ot)*3600*SUM(o.corevts/o.cu)/(SUM(o.sigevts)-SUM(o.bgevts))"

    # SELECT columns for period / N-day binning (query RunInfo directly)
    ontime=" ROUND(SUM("$ontimeif")/60., 1) AS ontime"
    excrate=" ROUND(SUM(fNumExcEvts)/SUM("$ontimeif")*3600, 1) AS excrate"
    significance="ROUND(LiMa(Sum(fNumSigEvts), SUM(fNumBgEvts)), 1) AS significance"
    numexc="Sum(fNumExcEvts) AS numexc"
    numsig="Sum(fNumSigEvts) AS numsig"
    numbg="Sum(fNumBgEvts) AS numbg"
    excrateerr=" ROUND("$excerr"/SUM("$ontimeif")*3600, 1) AS excrateerr"
    correxcrate=" ROUND(SUM("$correvts")/SUM("$ontimeif")*3600, 1) AS correxcrate"
    # corerr = MMath::ErrorExc(excevtssum+bgevtssum, bgevtssum*5, 0.2)/ontimesum*3600.*corrate/excrate;
    correxcrateerr=" ROUND("$excerr"/SUM("$ontimeif")*3600*SUM("$correvts")/SUM(fNumExcEvts), 1) AS correxcrateerr"
    # correction on run basis
    cu=" ROUND("$CU", 2) AS cu"
    cuerr=" ROUND("$CUerr", 2) AS cuerr"
    flux="ROUND("$CU" * "$crabflux", 2) AS flux"
    fluxerr="ROUND("$CUerr" * "$crabflux", 2) AS fluxerr"

    # SELECT columns for minute binning (query the subselect alias 'o')
    ontime2=" ROUND(SUM(o.ot)/60., 1) AS ontime"
    excrate2=" ROUND((SUM(o.sigevts)-SUM(o.bgevts))/SUM(o.ot)*3600, 1) AS excrate"
    significance2=" ROUND(LiMa(SUM(o.sigevts),SUM(o.bgevts)), 1) AS significance"
    numexc2="Sum(o.sigevts-o.bgevts) AS numexc"
    numsig2="Sum(o.sigevts) AS numsig"
    numbg2="Sum(o.bgevts) AS numbg"
    excrateerr2=" ROUND("$excerr2"/SUM(o.ot)*3600, 1) AS excrateerr"
    correxcrate2=" ROUND(SUM(o.corevts)/SUM(o.ot)*3600, 1) AS correxcrate"
    correxcrateerr2=" ROUND("$excerr2"/SUM(o.ot)*3600*SUM(o.corevts)/(SUM(o.sigevts)-SUM(o.bgevts)), 1) AS correxcrateerr"
    cu2=" ROUND("$CU2", 2) AS cu"
    cuerr2=" ROUND("$CUerr2", 2) AS cuerr"
    flux2="ROUND("$CU2" * "$crabflux", "$fluxprec") AS flux"
    fluxerr2="ROUND("$CUerr2" *"$crabflux", "$fluxprec") AS fluxerr"


    if [ "$bin" -le 0 ]
    then
        # bin<=0: group by period (bin=0) or by blocks of |bin| days
        num="#bin"
        # first part of the query
        querystart="SELECT "
        if [ "$bin" -eq 0 ]
        then
            querystart=$querystart" fPeriod AS num, "
        else
            # ${bin#-} strips the leading minus sign (bin is negative here)
            querystart=$querystart" FLOOR((Mjd(fRunStart)-Mjd("$nightmin")-0.5)/"${bin#-}".) AS num, "
        fi
        querystart=$querystart" "$time", "$start", "$stop", "

        # final part of the query
        querybase=$from$where
        querybase=$querybase" GROUP BY num "
        if [ "$ontimelimit" = "" ]
        then
            querybase=$querybase" HAVING SUM("$ontimeif")>1200 ORDER BY num " # 20 min
        else
            querybase=$querybase" HAVING SUM("$ontimeif")>"$ontimelimit" ORDER BY num "
        fi

        # internal
        queryint=$querystart
        queryint=$queryint" "$excrate", "$correxcrate", "$cu", "$flux", "
        queryint=$queryint" "$deltat", "$ontime", "
        queryint=$queryint" "$excrateerr", "$correxcrateerr", "$cuerr", "$fluxerr", "
        queryint=$queryint" "$significance", "
        queryint=$queryint" Min(fNight) AS nightmin, Max(fNight) AS nightmax, "
        queryint=$queryint" "$numexc", "$numsig", "$numbg", "
        queryint=$queryint" Min("$zenith"Min) AS zdmin, Max("$zenith"Max) AS zdmax, "
        queryint=$queryint" Min("$thresh") AS thmin, Max("$thresh") AS thmax "
        queryint=$queryint" "$querybase

        # for collaborators
        querycol=$querystart
        querycol=$querycol" "$excrate", "$correxcrate", "$cu", "$flux", "
        querycol=$querycol" "$deltat", "$ontime", "
        querycol=$querycol" "$excrateerr", "$correxcrateerr", "$cuerr", "$fluxerr", "
        querycol=$querycol" "$significance
        querycol=$querycol" "$querybase

        # external
        queryext=$querystart" "$excrate", "$deltat", "$excrateerr" "$querybase

    else
        # bin>0: group runs into blocks of $bin minutes of ontime
        num=
        # first part of the query
        querystart="SELECT "
        querystart=$querystart" "$time2", "$start2", "$stop2", "

        # final part of the query: subselect assigns each run to a block
        # via the mysql user variables @night/@ot/@os/@bl
        querybase=" FROM (SELECT fNight, @ot:="$ontimeif" AS ot, fRunStart AS start, fRunStop AS stop, fNumSigEvts AS sigevts, fNumBgEvts AS bgevts, "
        querybase=$querybase" "$correvts" AS corevts, CUQLA(fNight) AS cu, "
        querybase=$querybase" IF (@night=fNight AND FLOOR((@os+@ot)/"$bin"./60.)<1, @bl, @bl := @bl + 1) AS block, "
        querybase=$querybase" IF (@night=fNight AND FLOOR((@os+@ot)/"$bin"./60.)<1, @os:=@os + @ot, @os := @ot) AS os, @night :=fNight AS night "
        querybase=$querybase$from" CROSS JOIN (SELECT @night :=0, @ot :=0, @os :=0, @bl:=0) PARAMS "
        querybase=$querybase$where" ORDER BY fRunStart) o GROUP BY block HAVING ontime>0.75*"$bin" ORDER BY 'time'"

        # internal
        queryint=$querystart
        queryint=$queryint" "$excrate2", "$correxcrate2", "$cu2", "$flux2", "
        queryint=$queryint" "$deltat2", "$ontime2", "
        queryint=$queryint" "$excrateerr2", "$correxcrateerr2", "$cuerr2", "$fluxerr2", "
        queryint=$queryint" "$significance2", "
        queryint=$queryint" avg(o.night) AS night, "
        queryint=$queryint" "$numexc2", "$numsig2", "$numbg2" "
        queryint=$queryint" "$querybase

        # for collaborators
        querycol=$querystart
        querycol=$querycol" "$excrate2", "$correxcrate2", "$cu2", "$flux2", "
        querycol=$querycol" "$deltat2", "$ontime2", "
        querycol=$querycol" "$excrateerr2", "$correxcrateerr2", "$cuerr2", "$fluxerr2", "
        querycol=$querycol" "$significance2
        querycol=$querycol" "$querybase

        # external
        queryext=$querystart" "$excrate2", "$deltat2", "$ontime2", "$excrateerr2" "$querybase

    fi


    # internal file: full set of columns incl. zd/th ranges
    fileint=$datapath"/FACT_preliminary_"$name"_internal.dat"
    if [ "$overwrite" = "yes" ]
    then
        echo "internal: "$fileint
        echo "# This file was created at $(date)" > "$fileint"
        # add query and policy here
    fi
    headerint="# "$num" time["$timeunit"] start["$timeunit"] stop["$timeunit"] excrate[evts/h] corr.excrate[evts/h] flux[CU] flux[e-11/cm2/s] delta_time["$timeunit"] ontime[min]"
    headerint=$headerint" excrate_err[evts/h] corr.excrate_err[evts/h] flux_err[CU] flux_err[e-11/cm2/s] significance night num_exc num_sig num_bg zdmin zdmax thmin thmax"
    # intentionally unquoted: word-splitting collapses double blanks
    # (e.g. when $num is empty)
    echo $headerint >> "$fileint"
    #echo "$queryint"
    mysql --defaults-file="$sqlpw" -u factread --host="$host" $dbname -s -e "$queryint" >> "$fileint"
    #mysql --defaults-file="$sqlpw" -u factread --host="$host" $dbname -e "$queryint"


    # external file: reduced column set
    fileext=$datapath"/FACT_preliminary_"$name"_external.dat"
    if [ "$overwrite" = "yes" ]
    then
        echo "external: "$fileext
        echo "# This file was created at $(date)" > "$fileext"
        # add query and policy here
    fi
    headerext="# "$num" time["$timeunit"] start["$timeunit"] stop["$timeunit"] excrate[evts/h] delta_time["$timeunit"] excrate_err[evts/h] "
    echo $headerext >> "$fileext"
    #echo "$queryext"
    mysql --defaults-file="$sqlpw" -u factread --host="$host" $dbname -s -e "$queryext" >> "$fileext"
    #mysql --defaults-file="$sqlpw" -u factread --host="$host" $dbname -e "$queryext"

    # collaborators file: like internal but without zd/th/night details
    filecol=$datapath"/FACT_preliminary_"$name"_collaborators.dat"
    if [ "$overwrite" = "yes" ]
    then
        echo "collaborators: "$filecol
        echo "# This file was created at $(date)" > "$filecol"
        # add query and policy here
    fi
    headercol="# "$num" time["$timeunit"] start["$timeunit"] stop["$timeunit"] excrate[evts/h] corr.excrate[evts/h] flux[CU] flux[e-11/cm2/s] delta_time["$timeunit"] ontime[min]"
    headercol=$headercol" excrate_err[evts/h] corr.excrate_err[evts/h] flux_err[CU] flux_err[e-11/cm2/s] significance "
    echo $headercol >> "$filecol"
    #echo "$querycol"
    mysql --defaults-file="$sqlpw" -u factread --host="$host" $dbname -s -e "$querycol" >> "$filecol"
    #mysql --defaults-file="$sqlpw" -u factread --host="$host" $dbname -e "$querycol"
}
313
314# -------------------------------------------------------------------------------------- #
315# SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP SETUP #
316# -------------------------------------------------------------------------------------- #
317# #
318# The lines below define the basic setup for the database and give examples and #
319# explanations for the various options available. #
320# The request of the data itself is done with a smaller setup further down. #
321# #
322# -------------------------------------------------------------------------------------- #
323#
324# ----------
325# DB SETUP
326# ----------
327# path to file with mysql password
# path to file with mysql password
sqlpw=/home/$USER/.mysql.pw
# host of mysql server with FACT DB
#host=lp-fact # ISDC
host=10.0.100.21 # LP or LP via vpn
#host=localhost # your local machine in case you have a copy of DB
# name of database
dbname=factdata
#
# -------------
#  BASIC SETUP
# -------------
# output path
# FIX: $(…) instead of backticks and quoting so paths with blanks work
path=$(dirname "$0")
datapath=$path"/data"
# create directory for data files (-p: no error if it already exists)
if ! [ -e "$datapath" ]
then
   mkdir -p "$datapath"
fi
# time unit
#timeunit=timestamp # default
#timeunit=unix
timeunit=mjd
# time binning
# positive values: minutes
# negative values: days
# special case 0: period
# for season binning choose -365 and according start date
#bin=20 # minutes
#bin=0 # period
bin=-1 # nightly
#bin=-365 # yearly
# choose analysis
#table="AnalysisResultsAllQLA" # N/A
table="AnalysisResultsRunLP" # QLA
#table="AnalysisResultsRunISDC" # ISDC
# time range
nightmin=20111115
nightmax=20201231
# defaults for zd and threshold
zdmax=90 # all data
thmax=1500 # all data
# overwrite dataset file?
# (useful to combine different binnings in one file -> set to "no")
overwrite="yes"
# optional: require minimal ontime per bin (default 20 min)
#ontimelimit=30 # 30 min
ontimelimit= # default 20 min
376
377# -------------------------------------------------------------------------------------- #
378# SETUP - GET YOUR DATA HERE - SETUP - GET YOUR DATA HERE - SETUP - GET YOUR DATA HERE #
379# -------------------------------------------------------------------------------------- #
380# #
381# Adapt the lines below to your needs. #
382# Overwrite default settings above. #
383# The data-request is sent with the line 'get_results.' #
# Minimum setup: Define source key and name for file.                                  #
385# The list of source keys can be found at #
386# https://fact-project.org/run_db/db/printtable.php?fTable=Source&fSortBy=fSourceKEY+ #
387# More examples can be found further down. #
388# #
389# REMARKS: #
390# - correction of effect of zd and threshold not yet finalized and only valid for QLA #
391# - no CU-conversion available for ISDC-analysis so far (that for QLA is used instead) #
392# #
393# -------------------------------------------------------------------------------------- #
394
# 501 MAGIC
# Mrk 501, nightly binning, July/August 2014 (campaign with MAGIC)
name="Mrk501_2014_forMAGIC"
source=2
nightmin=20140714
nightmax=20140805
bin=-1
get_results
# same request with 20-minute binning:
#name="Mrk501_2014_forMAGIC20"
#bin=20
#get_results



# end script here
exit
410
411
412
#
# more examples
#
# NOTE: everything below is unreachable as long as the 'exit' above
# stays in place; move/remove it (or copy a block above the 'exit')
# to run one of these requests.

# Mrk 421: nightly, 20-min, 3-day, 10-day and period binning
source=1
name="Mrk421_nightly"
bin=-1
get_results
name="Mrk421_20min"
bin=20
get_results
name="Mrk421_3d"
bin=-3
get_results
name="Mrk421_10d"
bin=-10
get_results
name="Mrk421_period"
bin=0
get_results



# Mrk 501: same set of binnings
source=2
name="Mrk501_nightly"
bin=-1
get_results
name="Mrk501_20min"
bin=20
get_results
name="Mrk501_3d"
bin=-3
get_results
name="Mrk501_10d"
bin=-10
get_results
name="Mrk501_period"
bin=0
get_results



# 2344 (1ES 2344+514)
source=3
name="2344_nightly"
bin=-1
get_results
name="2344_20min"
bin=20
get_results
name="2344_period"
bin=0
get_results



# 1959 (1ES 1959+650)
source=7
name="1959_nightly"
bin=-1
get_results
name="1959_20min"
bin=20
get_results
name="1959_period"
bin=0
get_results



# 0323 (source key 12)
source=12
name="0323_nightly"
bin=-1
get_results
name="0323_20min"
bin=20
get_results
name="0323_period"
bin=0
get_results



# crab: also a seasonal binning with its own time range
source=5
name="Crab_nightly"
bin=-1
get_results
name="Crab_20min"
bin=20
get_results
name="Crab_period"
bin=0
get_results
name="Crab_season"
bin=-365
nightmin=20110716
nightmax=20180716
get_results



# 1959, 2016 season only
name="1959_2016"
source=7
bin=-1
nightmin=20160201
nightmax=20161105
get_results

# 1959 with per-interval binning; overwrite="no" combines all
# intervals into one output file
name="1959_all_variable"
overwrite="no"
source=7
bin=-365
nightmin=20120201
nightmax=20130131
get_results
nightmin=20130201
nightmax=20140131
get_results
nightmin=20140201
nightmax=20150131
get_results
bin=0
nightmin=20150201
nightmax=20160131
get_results
bin=-1
nightmin=20160201
nightmax=20170131
get_results
bin=0
nightmin=20170201
nightmax=20180131
get_results



# variant with a different split of the 2016 season
# (the "yes" here is immediately overridden by the "no" below)
overwrite="yes"
name="1959_all_variable2"
overwrite="no"
source=7
bin=-365
nightmin=20120201
nightmax=20130131
get_results
nightmin=20130201
nightmax=20140131
get_results
nightmin=20140201
nightmax=20150131
get_results
bin=0
nightmin=20150201
nightmax=20160131
get_results
bin=-1
nightmin=20160201
nightmax=20160817
get_results
bin=0
nightmin=20160818
nightmax=20180131
get_results



# 2344, period binning (restores overwrite="yes")
overwrite="yes"
bin=0
source=3
name="2344period"
get_results



# flare night (HESS), 10-minute binning
name="Mrk501_10min_flarenight"
source=2
bin=10
nightmin=20140623
nightmax=20140623
get_results



# flare night (HESS), 5-minute binning
name="Mrk501_5min_flarenight"
source=2
bin=5
nightmin=20140623
nightmax=20140623
get_results




# full sample
# NOTE(review): these inherit bin/nightmin/nightmax from whatever
# example ran last above — set them explicitly if run stand-alone
name="Mrk421_all_nightly"
source=1
get_results

name="Mrk501_all_nightly"
source=2
get_results

name="1959_all_nightly"
source=7
get_results

name="2344_all_nightly"
source=3
get_results



# follow-up of neutrino alerts (10-day binning from the alert night;
# nightmax is inherited from the setup above)
name="HESE20160427"
source=19
nightmin=20160425
bin=-10
get_results

name="AMON20160731"
source=21
nightmin=20160730
bin=-10
get_results
643
Note: See TracBrowser for help on using the repository browser.