source: fact/tools/pyscripts/pyfact/pyfact.py@ 14912

Last change on this file since 14912 was 14469, checked in by neise, 12 years ago
added UnixTimeUTC member to RawData class - called unixtime - seconds since 01.01.1970
  • Property svn:executable set to *
File size: 36.2 KB
Line 
1#!/usr/bin/python -tt
2#
3# Werner Lustermann, Dominik Neise
4# ETH Zurich, TU Dortmund
5#
6from ctypes import *
7import numpy as np
8import pprint # for SlowData
9from scipy import signal
10
11# get the ROOT stuff + my shared libs
12from ROOT import gSystem
13# factfits_h.so is made from factfits.h and is used to access the data
14# make sure the location of factfits_h.so is in LD_LIBRARY_PATH.
15# having it in PYTHONPATH is *not* sufficient
16gSystem.Load('factfits_h.so')
17gSystem.Load('calfactfits_h.so')
18from ROOT import *
19
class RawDataFeeder( object ):
    """ Wrapper class for RawData class
    capable of iterating over multiple RawData Files
    """

    def __init__(self, filelist):
        """ *filelist* list of files to iterate over
        the list should contain tuples, or sublists of two filenames
        the first should be a data file (\*.fits.gz)
        the second should be an amplitude calibration file (\*.drs.fits.gz)

        raises TypeError if *filelist* is not a list of 2-element
        sequences of strings.
        """

        self.__module__ = 'pyfact'

        # sanity check for input
        if not isinstance(filelist, list):
            raise TypeError('filelist should be a list')
        for entry in filelist:
            if len(entry) != 2:
                raise TypeError('the entries of filelist should have length == 2')
            for path in entry:
                if not isinstance(path, str):
                    raise TypeError('the entries of filelist should be path, i.e. of type str()')
                #todo check if 'path' is a valid path
                #   else: throw an Exception, or Warning?

        # work on a copy, so the caller's list is not emptied as we consume it
        # (the original implementation deleted entries from the caller's list)
        self.filelist = list(filelist)
        self._current_RawData = RawData(self.filelist[0][0], self.filelist[0][1], return_dict=True)
        del self.filelist[0]

    def __iter__(self):
        return self

    def next(self):
        """ Method being called by the iterator.
        Since the RawData Objects are simply looped over, the event_id from the
        RawData object will not be unique.
        Each RawData object will start with event_id = 1 as usual.

        Bug fixes vs. the original implementation:
          * 'self' parameter was missing, so calling obj.next() raised TypeError
          * the rollover branch referenced the bare name 'filelist' (NameError)
          * after switching to the next file, None was returned instead of
            the first event of that file
        """
        try:
            return self._current_RawData.next()
        except StopIteration:
            # current_RawData was completely processed
            # delete it (I hope this calls the destructor of the fits file and/or closes it)
            del self._current_RawData
            # and remake it, if possible
            if len(self.filelist) > 0:
                self._current_RawData = RawData(self.filelist[0][0], self.filelist[0][1], return_dict=True)
                del self.filelist[0]
                # deliver the first event of the fresh file
                return self.next()
            else:
                raise

    # Python 3 style alias, harmless under Python 2
    __next__ = next
71
72
73
class RawData( object ):
    """ raw data access and calibration

    class is **iterable**

    - open raw data file and drs calibration file
    - performs amplitude calibration
    - performs baseline subtraction if wanted
    - provides all data in an array:
        row = number of pixel
        col = length of region of interest

    """


    def __init__(self, data_file_name, calib_file_name,
                 baseline_file_name='',
                 return_dict = True,
                 use_CalFactFits = True,
                 do_calibration = True,
                 user_action_calib=lambda acal_data, data, blm, tom, gm, scells, nroi: None):
        """ -constructor-

        - open data file and calibration data file
        - get basic information about the data in data_file_name
        - allocate buffers for data access

        *data_file_name* : fits or fits.gz file of the data including the path

        *calib_file_name* : fits or fits.gz file containing DRS calibration data

        *baseline_file_name* : npy file containing the baseline values

        *return_dict* : this option will be removed in future releases.
        formerly the next() method returned only a subset of (important) event information,
        and it was not transparent how to retrieve the other (less important) information.
        Nowadays next() returns self.__dict__ which contains everything we were able to find in the fits file.

        *use_CalFactFits* : formerly the DRS amplitude calibration was
        implemented in python. But for performance reasons this was now moved into
        a C++ class called CalFactFits. For test purposes, this option can be set to
        False, but this is not really maintained anymore. If the DRS calibration algorithm is
        being updated in C++ it may not be updated in the python implementation.

        *do_calibration* : In case *use_CalFactFits* is False, one may choose
        not to calibrate the data at all, thus save quite some time.
        This is imho only needed in case one is interested in learning something about the
        calibration algorithm itself.

        *user_action_calib* : callback function, intended for tests of the DRS calibration algorithm.
        but since this is not done in the Python regime anymore, this function is never called.
        (depending on *use_CalFactFits* of course)
        """
        self.__module__='pyfact'
        # manual implementation of default value, but I need to find out
        # if the user of this class is aware of the new option
        if return_dict == False:
            print 'DEPRECATION WARNING:'
            print 'you are using RawData in a way, which is nor supported anymore.'
            print '    Please set: return_dict = True, in the __init__ call'
        self.return_dict = return_dict
        self.use_CalFactFits = use_CalFactFits

        self.do_calibration = do_calibration

        self.data_file_name = data_file_name
        self.calib_file_name = calib_file_name
        self.baseline_file_name = baseline_file_name

        self.user_action_calib = user_action_calib

        # baseline correction: True / False
        if len(baseline_file_name) == 0:
            self.correct_baseline = False
        else:
            self.correct_baseline = True


        # access data file
        if use_CalFactFits:
            try:
                data_file = CalFactFits(data_file_name, calib_file_name)
            except IOError:
                print 'problem accessing data file: ', data_file_name
                raise # stop ! no data

            #: either CalFactFits object or FactFits object, depending on *use_CalFactFits*
            self.data_file = data_file
            #: 1440x300 nparray containing the event data. pixel sorted according to CHID
            # the C++ side writes calibrated samples directly into this buffer,
            # so it must stay alive as long as data_file is used
            self.data = np.empty( data_file.npix * data_file.nroi, np.float64)
            data_file.SetNpcaldataPtr(self.data)
            # reshape() returns a view, so the pointer registered above stays valid
            self.data = self.data.reshape( data_file.npix, data_file.nroi )
            #: copy of data. here for historical reasons
            self.acal_data = self.data
            #: region of interest. (number of DRS slices read).
            # for FACT data mostly 300. for special runs sometimes 1024.
            self.nroi = data_file.nroi
            #: number of Pixel in FACT. should be 1440
            self.npix = data_file.npix
            #: the total number of events in the data_file
            self.nevents = data_file.nevents

            # Data per event
            #: starting at 1
            self.event_id = None

            #: data=4 ; the rest I don't know by heart .. should be documented here :-)
            self.trigger_type = None
            #self.start_cells = None
            #self.board_times = None
            #: slice where drs readout started for all DRS chips (160) .. but enlarged to the size of 1440 pixel. thus there are always 9 equal numbers inside.
            self.start_cells = np.zeros( self.npix, np.int16 )
            #: each FAD has an onboard clock running from startup time. Currently I don't know the time unit. However this is an array of 40 times, since we have 40 boards.
            self.board_times = np.zeros( 40, np.int32 )
            # (seconds, microseconds) pair filled from the fits file;
            # combined into self.unixtime in next()
            self._unixtime_tuple = np.zeros( 2, np.int32 )
            #: seconds since 01.01.1970 as float, set per event in next()
            self.unixtime = None

            # data_file is a CalFactFits object
            # data_file.datafile is one of the two FactFits objects hold by a CalFactFits.
            # sorry for the strange naming ..
            data_file.datafile.SetPtrAddress('StartCellData', self.start_cells)
            data_file.datafile.SetPtrAddress('BoardTime', self.board_times)
            data_file.datafile.SetPtrAddress('UnixTimeUTC', self._unixtime_tuple)


        else:
            try:
                data_file = FactFits(self.data_file_name)
            except IOError:
                print 'problem accessing data file: ', data_file_name
                raise # stop ! no data

            self.data_file = data_file

            # get basic information about the data file
            self.nroi = data_file.GetUInt('NROI')
            self.npix = data_file.GetUInt('NPIX')
            self.nevents = data_file.GetNumRows()

            # allocate the data memories
            self.event_id = c_ulong()
            self.trigger_type = c_ushort()
            self.data = np.zeros( self.npix * self.nroi, np.int16 ).reshape(self.npix ,self.nroi)
            self.start_cells = np.zeros( self.npix, np.int16 )
            self.board_times = np.zeros( 40, np.int32 )
            self._unixtime_tuple = np.zeros(2, np.int32 )

            # set the pointers to the data++
            # NOTE: the buffers registered here must not be reassigned later,
            # the fits reader writes into them on every GetNextRow()
            data_file.SetPtrAddress('EventNum', self.event_id)
            data_file.SetPtrAddress('TriggerType', self.trigger_type)
            data_file.SetPtrAddress('StartCellData', self.start_cells)
            data_file.SetPtrAddress('Data', self.data)
            data_file.SetPtrAddress('BoardTime', self.board_times)
            data_file.SetPtrAddress('UnixTimeUTC', self._unixtime_tuple)

        # open the calibration file
        try:
            calib_file = FactFits(self.calib_file_name)
        except IOError:
            print 'problem accessing calibration file: ', calib_file_name
            raise
        #: drs calibration file
        self.calib_file = calib_file

        baseline_mean = calib_file.GetN('BaselineMean')
        gain_mean = calib_file.GetN('GainMean')
        trigger_offset_mean = calib_file.GetN('TriggerOffsetMean')

        # number of calibration constants per pixel
        # (Python 2 integer division; counts are assumed to be multiples of npix)
        self.Nblm = baseline_mean / self.npix
        self.Ngm = gain_mean / self.npix
        self.Ntom = trigger_offset_mean / self.npix

        self.blm = np.zeros(baseline_mean, np.float32).reshape(self.npix , self.Nblm)
        self.gm = np.zeros(gain_mean, np.float32).reshape(self.npix , self.Ngm)
        self.tom = np.zeros(trigger_offset_mean, np.float32).reshape(self.npix , self.Ntom)

        calib_file.SetPtrAddress('BaselineMean', self.blm)
        calib_file.SetPtrAddress('GainMean', self.gm)
        calib_file.SetPtrAddress('TriggerOffsetMean', self.tom)
        calib_file.GetRow(0)

        # make calibration constants double, so we never need to roll
        self.blm = np.hstack((self.blm, self.blm))
        self.gm = np.hstack((self.gm, self.gm))
        self.tom = np.hstack((self.tom, self.tom))

        self.v_bsl = np.zeros(self.npix) # array of baseline values (all ZERO)

    def __iter__(self):
        """ iterator """
        return self

    def next(self):
        """ used by __iter__  (Python 2 iterator protocol)

        returns self.__dict__

        raises StopIteration when the underlying file has no more events.
        """
        if self.use_CalFactFits:
            if self.data_file.GetCalEvent() == False:
                raise StopIteration
            else:
                self.event_id = self.data_file.event_id
                self.trigger_type = self.data_file.event_triggertype
                #self.start_cells = self.data_file.event_offset
                #self.board_times = self.data_file.event_boardtimes
                #self.acal_data = self.data.copy().reshape(self.data_file.npix, self.data_file.nroi)

                # seconds since 01.01.1970, microsecond part folded in as fraction
                self.unixtime = self._unixtime_tuple[0] + self._unixtime_tuple[1]/1.e6

        else:
            if self.data_file.GetNextRow() == False:
                raise StopIteration
            else:
                if self.do_calibration == True:
                    self.calibrate_drs_amplitude()

        #print 'nevents = ', self.nevents, 'event_id = ', self.event_id.value
        if self.return_dict:
            return self.__dict__
        else:
            return self.acal_data, self.start_cells, self.trigger_type.value

    def next_event(self):
        """ ---- DEPRECATED ----

        load the next event from disk and calibrate it
        """
        if self.use_CalFactFits:
            self.data_file.GetCalEvent()
        else:
            self.data_file.GetNextRow()
            self.calibrate_drs_amplitude()

    def calibrate_drs_amplitude(self):
        """ --- DEPRECATED ---

        since the DRS calibration is done by the C++ class CalFactFits

        perform the drs amplitude calibration of the event data
        """
        # shortcuts
        blm = self.blm
        gm = self.gm
        tom = self.tom

        to_mV = 2000./4096.
        #: 2D array with amplitude calibrated data in mV
        acal_data = self.data * to_mV # convert ADC counts to mV


        for pixel in range( self.npix ):
            #shortcuts
            sc = self.start_cells[pixel]
            roi = self.nroi
            # rotate the pixel baseline mean to the Data startCell
            # (blm/gm were doubled with hstack in __init__, so sc+roi never overruns)
            acal_data[pixel,:] -= blm[pixel,sc:sc+roi]
            # the 'trigger offset mean' does not need to be rolled
            # on the contrary, it seems there is an offset in the DRS data,
            # which is related to its distance to the startCell, not to its
            # distance to the beginning of the physical pipeline in the DRS chip
            acal_data[pixel,:] -= tom[pixel,0:roi]
            # rotate the pixel gain mean to the Data startCell
            acal_data[pixel,:] /= gm[pixel,sc:sc+roi]


        # NOTE(review): magic constant 1907.35 -- presumably a fixed
        # amplitude-scale factor; TODO confirm its meaning and name it
        self.acal_data = acal_data * 1907.35

        self.user_action_calib(  self.acal_data,
            np.reshape(self.data, (self.npix, self.nroi) ), blm, tom, gm, self.start_cells, self.nroi)


    def baseline_read_values(self, file, bsl_hist='bsl_sum/hplt_mean'):
        """
        open ROOT file with baseline histogram and read baseline values
        into self.v_bsl (one value per pixel)

        *file* : name of the root file

        *bsl_hist* : path to the histogram containing the baseline values
        """

        try:
            f = TFile(file)
        except:
            # NOTE(review): bare except silently swallows *all* errors here,
            # not only I/O problems
            print 'Baseline data file could not be read: ', file
            return

        h = f.Get(bsl_hist)

        # ROOT histogram bins are 1-based, hence i+1
        for i in range(self.npix):
            self.v_bsl[i] = h.GetBinContent(i+1)

        f.Close()

    def baseline_correct(self):
        """ subtract baseline from the data

        DN 08.06.2011: I didn't use this function at all so far... don't know how well it works.
        """

        for pixel in range(self.npix):
            self.acal_data[pixel,:] -= self.v_bsl[pixel]

    def info(self):
        """ print run information

        not very well implemented ... we need more info here.
        """
        print 'data file: ', self.data_file_name
        print 'calib file: ', self.calib_file_name
        print '... we need more information printed here ... '
384
385# -----------------------------------------------------------------------------
class RawDataFake( object ):
    """ raw data FAKE access similar to real RawData access

    generates sinus-plus-noise toy events, so code consuming
    RawData-like objects can be exercised without real FACT files.
    (the original version was marked 'DO NOT USE ... its not working';
    the NameErrors and shape bugs are fixed below)
    """


    def __init__(self, data_file_name, calib_file_name,
                 user_action_calib=lambda acal_data, data, blm, tom, gm, scells, nroi: None,
                 baseline_file_name=''):
        """ -constructor-

        *data_file_name*, *calib_file_name* : only remembered for info(),
        no file is ever opened.
        """
        self.__module__='pyfact'

        # remember the (fake) file names so info() can report them
        # (the original info() referenced undefined globals instead)
        self.data_file_name = data_file_name
        self.calib_file_name = calib_file_name
        self.baseline_file_name = baseline_file_name
        self.user_action_calib = user_action_calib

        self.nroi = 300
        self.npix = 9
        self.nevents = 1000

        self.simulator = None

        # sampling intervals in ns; cumsum() of this gives the sample times
        self.time = np.ones(1024) * 0.5


        self.event_id = c_ulong(0)
        self.trigger_type = c_ushort(4)
        self.data = np.zeros( self.npix * self.nroi, np.int16 ).reshape(self.npix ,self.nroi)
        self.start_cells = np.zeros( self.npix, np.int16 )
        self.board_times = np.zeros( 40, np.int32 )

    def __iter__(self):
        """ iterator """
        return self

    def next(self):
        """ used by __iter__ ; returns self.__dict__

        raises StopIteration after self.nevents events
        """
        self.event_id = c_ulong(self.event_id.value + 1)
        self.board_times = self.board_times + 42

        if self.event_id.value >= self.nevents:
            raise StopIteration
        else:
            self._make_event_data()

        return self.__dict__

    def _make_event_data(self):
        """ fill self.data with one freshly simulated event """
        # fixed: was 'time[0]' (NameError) -- the member is self.time
        sample_times = self.time.cumsum() - self.time[0]

        # random start cell, identical for all pixels
        self.start_cells = np.ones( self.npix, np.int16 ) * np.random.randint(0,1024)

        starttime = self.start_cells[0]

        # renamed from 'signal' to avoid shadowing the module-level scipy import
        waveform = self._std_sinus_simu(sample_times, starttime)

        # fixed: the original stacked npix+1 rows of length 1024;
        # build exactly npix rows and clip to the region of interest
        self.data = np.vstack( [waveform] * self.npix )[:, :self.nroi]

    def _std_sinus_simu(self, times, starttime):
        """ return a noisy sinus of fixed period, evaluated at *times* """
        period = 10 # in ns

        # give a jitter on starttime (fixed: was the typo 'startime' -> NameError)
        starttime = np.random.normal(starttime, 0.05)

        phase = 0.0
        waveform = 10 * np.sin(times * 2*np.pi/period + starttime + phase)

        # add some noise
        noise = np.random.normal(0.0, 0.5, waveform.shape)
        waveform += noise
        return waveform

    def info(self):
        """ print run information

        fixed: the original referenced undefined globals and attributes
        (Nblm, Ngm, Ntom) that never exist on this class, and crashed.
        """
        # single-argument print() behaves identically under Python 2 and 3
        print('data file: ' + str(self.data_file_name))
        print('calib file: ' + str(self.calib_file_name))
469
470# -----------------------------------------------------------------------------
471import ctypes
472
class SlowData( object ):
    """ -Fact SlowData File-

    A Python wrapper for the fits-class implemented in factfits.h
    provides easy access to the fits file meta data.

    * dictionary of file metadata - self.meta
    * dict of table metadata - self.columns
    * variable table column access, thus possibly increased speed while looping
    """
    def __del__(self):
        # drop the underlying FactFits object explicitly
        # (presumably its destructor closes the fits file -- TODO confirm)
        del self.f

    def __init__(self, path):
        """ creates meta and columns dictionaries

        *path* : path to the fits(.gz) file; IOError if it does not exist
        """
        import os

        if not os.path.exists(path):
            raise IOError(path+' was not found')
        self.path = path
        self.__module__ = 'pyfact'
        try:
            self.f = FactFits(path)
        except IOError:
            # NOTE(review): 'data_file_name' is undefined in this scope;
            # this error path would itself raise NameError (should be 'path')
            print 'problem accessing data file: ', data_file_name
            raise # stop ! no data

        self.meta = self._make_meta_dict()
        self.columns = self._make_columns_dict()

        # derives number_of_rows/columns and self.dict from self.meta
        self._treat_meta_dict()


        # list of columns, which are already registered
        # see method register()
        self._registered_cols = []
        # dict of column data, this is used, in order to be able to remove
        # the ctypes of
        self._table_cols = {}

        # I need to count the rows, since the normal loop mechanism seems not to work.
        self._current_row = 0

        self.stacked_cols = {}

    def _make_meta_dict(self):
        """ This method retrieves meta information about the fits file and
        stores this information in a dict
        return: dict
            key: string - all capital letters
            value: tuple( numerical value, string comment)
        """
        # abbreviation
        f = self.f

        # intermediate variables for file metadata dict generation

        keys=f.GetPy_KeyKeys()
        values=f.GetPy_KeyValues()
        comments=f.GetPy_KeyComments()
        types=f.GetPy_KeyTypes()

        # the four lists must line up element-wise
        if len(keys) != len(values):
            raise TypeError('len(keys)',len(keys),' != len(values)', len(values))
        if len(keys) != len(types):
            raise TypeError('len(keys)',len(keys),' != len(types)', len(types))
        if len(keys) != len(comments):
            raise TypeError('len(keys)',len(keys),' != len(comments)', len(comments))

        meta_dict = {}
        for i in range(len(keys)):
            # NOTE(review): 'type' shadows the builtin inside this loop
            type = types[i]
            if type == 'I':
                value = int(values[i])
            elif type == 'F':
                value = float(values[i])
            elif type == 'B':
                if values[i] == 'T':
                    value = True
                elif values[i] == 'F':
                    value = False
                else:
                    raise TypeError("meta-type is 'B', but meta-value is neither 'T' nor 'F'. meta-value:",values[i])
            elif type == 'T':
                value = values[i]
            else:
                raise TypeError("unknown meta-type: known meta types are: I,F,B and T. meta-type:",type)
            meta_dict[keys[i]]=(value, comments[i])
        return meta_dict


    def _make_columns_dict(self):
        """ This method retrieves information about the columns
        stored inside the fits files internal binary table.
        returns: dict
            key: string column name -- all capital letters
            values: tuple(
                number of elements in table field - integer
                size of element in bytes -- this is not really interesting for any user
                    might be omitted in future versions
                type - a single character code -- should be translated into
                    a comprehensible word
                unit - string like 'mV' or 'ADC count'
        """
        # abbreviation
        f = self.f

        # intermediate variables for file table-metadata dict generation
        keys=f.GetPy_ColumnKeys()
        #offsets=self.GetPy_ColumnOffsets() #not needed on python level...
        nums=f.GetPy_ColumnNums()
        sizes=f.GetPy_ColumnSizes()
        types=f.GetPy_ColumnTypes()
        units=f.GetPy_ColumnUnits()

        # zip the values
        values = zip(nums,sizes,types,units)
        # create the columns dictionary
        columns = dict(zip(keys ,values))
        return columns

    def stack(self, on=True):
        """ enable stacking: from now on each next() appends the registered
        columns' values to self.stacked_cols (consumes one row to seed it)
        """
        self.next()
        for col in self._registered_cols:
            if isinstance( self.dict[col], type(np.array('')) ):
                self.stacked_cols[col] = self.dict[col]
            else:
#            elif isinstance(self.dict[col], ctypes._SimpleCData):
                self.stacked_cols[col] = np.array(self.dict[col])
#            else:
#                raise TypeError("I don't know how to stack "+col+". It is of type: "+str(type(self.dict[col])))

    def register(self, col_name):
        """ register for a column in the fits file

        after the call, this SlowData object will have a new member variable
        self.col_name, if col_name is a key in self.colums

        the value will be updated after each call of next(), or while iterating over self.
        NB: the initial value is zero(s)

        *col_name* : name of a key in self.columns, or 'all' to choose all.
        """
        columns = self.columns
        if col_name.lower() == 'all':
            for col in columns:
                self._register(col)
        else:
            #check if colname is in columns:
            if col_name not in columns:
                error_msg = 'colname:'+ col_name +' is not a column in the binary table.\n'
                error_msg+= 'possible colnames are\n'
                for key in columns:
                    error_msg += key+' '
                raise KeyError(error_msg)
            else:
                self._register(col_name)

    # 'private' method, do not use
    def _register( self, colname):
        """ allocate a ctypes / numpy buffer matching *colname*'s type and
        hand its address to the fits reader, so each GetNextRow() fills it
        """
        columns = self.columns
        f = self.f
        local = None

        number_of_elements = int(columns[colname][0])
        size_of_elements_in_bytes = int(columns[colname][1])
        ctypecode_of_elements = columns[colname][2]
        physical_unit_of_elements = columns[colname][3]

        # snippet from the C++ source code, or header file to be precise:
        #case 'L': gLog << "bool(8)"; break;
        #case 'B': gLog << "byte(8)"; break;
        #case 'I': gLog << "short(16)"; break;
        #case 'J': gLog << "int(32)"; break;
        #case 'K': gLog << "int(64)"; break;
        #case 'E': gLog << "float(32)"; break;
        #case 'D': gLog << "double(64)"; break;



        # the fields inside the columns can either contain single numbers,
        # or whole arrays of numbers as well.
        # we treat single elements differently...
        if number_of_elements == 0:
            return
        if number_of_elements == 1:
            # allocate some memory for a single number according to its type
            # NOTE(review): un_c_type is assigned but never used below
            if ctypecode_of_elements == 'J': # J is for a 4byte int, i.e. an unsigned long
                local = ctypes.c_ulong()
                un_c_type = long
            elif ctypecode_of_elements == 'I': # I is for a 2byte int, i.e. an unsinged int
                local = ctypes.c_ushort()
                un_c_type = int
            elif ctypecode_of_elements == 'B': # B is for a byte
                local = ctypes.c_ubyte()
                un_c_type = int
            elif ctypecode_of_elements == 'D':
                local = ctypes.c_double()
                un_c_type = float
            elif ctypecode_of_elements == 'E':
                local = ctypes.c_float()
                un_c_type = float
            elif ctypecode_of_elements == 'A':
                local = ctypes.c_uchar()
                un_c_type = chr
            elif ctypecode_of_elements == 'K':
                local = ctypes.c_ulonglong()
                un_c_type = long
            else:
                raise TypeError('unknown ctypecode_of_elements:',ctypecode_of_elements)
        else:
            # array-valued field: use a numpy buffer of the matching dtype
            if ctypecode_of_elements == 'B': # B is for a byte
                nptype = np.int8
            elif ctypecode_of_elements == 'A': # A is for a char .. but I don't know how to handle it
                nptype = np.int8
            elif ctypecode_of_elements == 'I': # I is for a 2byte int
                nptype = np.int16
            elif ctypecode_of_elements == 'J': # J is for a 4byte int
                nptype = np.int32
            elif ctypecode_of_elements == 'K': # K is for an 8byte int
                nptype = np.int64
            elif ctypecode_of_elements == 'E': # E is for a 4byte float
                nptype = np.float32
            elif ctypecode_of_elements == 'D': # D is for an 8byte float
                nptype = np.float64
            else:
                raise TypeError('unknown ctypecode_of_elements:',ctypecode_of_elements)
            local = np.zeros( number_of_elements, nptype)

        # Set the Pointer Address
        try:
            f.SetPtrAddress(colname, local)
        except TypeError:
            print 'something was wrong with SetPtrAddress()'
            print 'Type of colname', type(colname)
            print 'colname:', colname
            print 'Type of local', type(local)
            print 'length of local', len(local)
            print 'local should be alle zeros, since "local = np.zeros( number_of_elements, nptype)" '
            raise

        self._table_cols[colname] = local
        if number_of_elements > 1:
            self.__dict__[colname] = local
            self.dict[colname] = local
        else:
            # remove any traces of ctypes:
            self.__dict__[colname] = local.value
            self.dict[colname] = local.value
        self._registered_cols.append(colname)


    def _treat_meta_dict(self):
        """make 'interesting' meta information available like normal members.
        non interesting are:
        TFORM, TUNIT, and TTYPE
        since these are available via the columns dict.
        """

        self.number_of_rows = self.meta['NAXIS2'][0]
        self.number_of_columns = self.meta['TFIELDS'][0]

        # there are some information in the meta dict, which are alsways there:
        # there are regarded as not interesting:
        uninteresting_meta = {}
        uninteresting_meta['arraylike'] = {}
        uninteresting = ['NAXIS', 'NAXIS1', 'NAXIS2',
                         'TFIELDS',
                         'XTENSION','EXTNAME','EXTREL',
                         'BITPIX', 'PCOUNT', 'GCOUNT',
                         'ORIGIN',
                         'PACKAGE', 'COMPILED', 'CREATOR',
                         'TELESCOP','TIMESYS','TIMEUNIT','VERSION']
        for key in uninteresting:
            if key in self.meta:
                uninteresting_meta[key]=self.meta[key]
                del self.meta[key]

        # the table meta data contains


        # shortcut to access the meta dict. But this needs to
        # be cleaned up quickly!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        meta = self.meta

        # loop over keys:
        # * try to find array-like keys
        #   (keys ending in digits, e.g. FOO0, FOO1, ... are grouped)
        arraylike = {}
        singlelike = []
        for key in self.meta:
            stripped = key.rstrip('1234567890')
            if stripped == key:
                singlelike.append(key)
            else:
                if stripped not in arraylike:
                    arraylike[stripped] = 0
                else:
                    arraylike[stripped] += 1
        newmeta = {}
        for key in singlelike:
            newmeta[key.lower()] = meta[key]
        for key in arraylike:
            uninteresting_meta['arraylike'][key.lower()] = []
            for i in range(arraylike[key]+1):
                if key+str(i) in meta:
                    uninteresting_meta['arraylike'][key.lower()].append(meta[key+str(i)])
        self.ui_meta = uninteresting_meta
        # make newmeta self
        for key in newmeta:
            self.__dict__[key]=newmeta[key]

        # self.dict mirrors the instance dict, minus meta/ui_meta
        dict = self.__dict__.copy()
        del dict['meta']
        del dict['ui_meta']
        self.dict = dict

    def __iter__(self):
        """ iterator """
        return self

    def next(self):
        """ use to iterate over the file

        do not forget to call register() before iterating over the file
        call show() in order to find out, what parameters register() accepts.
        or just call register('all') in case you are unsure.

        returns self
        """
        # abbreviaition
        f = self.f

        # Here one might check, if looping makes any sense, and if not
        # one could stop looping or so...
        # like this:
        #
        # if len(self._registered_cols) == 0:
        #    print 'warning: looping without any registered columns'
        if self._current_row < self.number_of_rows:
            if f.GetNextRow() == False:
                raise StopIteration
            for col in self._registered_cols:
                # scalar columns live in ctypes objects: unwrap .value
                # (numpy array columns are updated in place by the reader)
                if isinstance(self._table_cols[col], ctypes._SimpleCData):
                    self.__dict__[col] = self._table_cols[col].value
                    self.dict[col] = self._table_cols[col].value

            for col in self.stacked_cols:
                if isinstance(self.dict[col], type(np.array(''))):
                    self.stacked_cols[col] = np.vstack( (self.stacked_cols[col],self.dict[col]) )
                else:
                    self.stacked_cols[col] = np.vstack( (self.stacked_cols[col],np.array(self.dict[col])) )
            self._current_row += 1
        else:
            raise StopIteration
        return self

    def show(self):
        """ pretty-print the current contents of self.dict
        """
        pprint.pprint(self.dict)
835
836
837
838
839class fnames( object ):
840 """ organize file names of a FACT data run
841
842 """
843
844 def __init__(self, specifier = ['012', '023', '2011', '11', '24'],
845 rpath = '/scratch_nfs/res/bsl/',
846 zipped = True):
847 """
848 specifier : list of strings defined as:
849 [ 'DRS calibration file', 'Data file', 'YYYY', 'MM', 'DD']
850
851 rpath : directory path for the results; YYYYMMDD will be appended to rpath
852 zipped : use zipped (True) or unzipped (Data)
853
854 """
855
856 self.specifier = specifier
857 self.rpath = rpath
858 self.zipped = zipped
859
860 self.make( self.specifier, self.rpath, self.zipped )
861
862
863 def make( self, specifier, rpath, zipped ):
864 """ create (make) the filenames
865
866 names : dictionary of filenames, tags { 'data', 'drscal', 'results' }
867 data : name of the data file
868 drscal : name of the drs calibration file
869 results : radikal of file name(s) for results (to be completed by suffixes)
870 """
871
872 self.specifier = specifier
873
874 if zipped:
875 dpath = '/data00/fact-construction/raw/'
876 ext = '.fits.gz'
877 else:
878 dpath = '/data03/fact-construction/raw/'
879 ext = '.fits'
880
881 year = specifier[2]
882 month = specifier[3]
883 day = specifier[4]
884
885 yyyymmdd = year + month + day
886 dfile = specifier[1]
887 cfile = specifier[0]
888
889 rpath = rpath + yyyymmdd + '/'
890 self.rpath = rpath
891 self.names = {}
892
893 tmp = dpath + year + '/' + month + '/' + day + '/' + yyyymmdd + '_'
894 self.names['data'] = tmp + dfile + ext
895 self.names['drscal'] = tmp + cfile + '.drs' + ext
896 self.names['results'] = rpath + yyyymmdd + '_' + dfile + '_' + cfile
897
898 self.data = self.names['data']
899 self.drscal = self.names['drscal']
900 self.results = self.names['results']
901
902 def info( self ):
903 """ print complete filenames
904
905 """
906
907 print 'file names:'
908 print 'data: ', self.names['data']
909 print 'drs-cal: ', self.names['drscal']
910 print 'results: ', self.names['results']
911
912# end of class definition: fnames( object )
913
def _test_SlowData( filename ):
    """ print usage instructions for interactively exploring a SlowData file

    NOTE: this function does not open *filename* itself; it only explains
    how to use the SlowData instance named 'file' that the __main__ block
    creates beforehand.
    """
    print '-'*70
    print "opened :", filename, " as 'file'"
    print
    print '-'*70
    print 'type file.show() to look at its contents'
    print "type file.register( columnname ) or file.register('all') in order to register columns"
    print
    print " due column-registration you declare, that you would like to retrieve the contents of one of the columns"
    print " after column-registration, the 'file' has new member variables, they are named like the columns"
    print " PLEASE NOTE: immediatly after registration, the members exist, but they are empty."
    print " the values are assigned only, when you call file.next() or when you loop over the 'file'"
    print
    print "in order to loop over it, just go like this:"
    print "for row in file:"
    print "    print row.columnname_one, row.columnname_two"
    print
    print ""
    print '-'*70
933
934
935
def _test_iter( nevents ):
    """ test for function __iter__

    loops over the first *nevents* events of a hard-coded pair of
    raw-data / drs-calibration files and prints some per-event values.
    """

    # NOTE(review): these paths presumably only exist on the FACT analysis
    # machines -- the test cannot run elsewhere
    data_file_name = '/fact/raw/2011/11/24/20111124_117.fits.gz'
    calib_file_name = '/fact/raw/2011/11/24/20111124_114.drs.fits.gz'
    print 'the files for this test are:'
    print 'data file:', data_file_name
    print 'calib file:', calib_file_name
#    data_file_name = '/home/luster/win7/FACT/data/raw/20120114/20120114_028.fits.gz'
#    calib_file_name = '/home/luster/win7/FACT/data/raw/20120114/20120114_022.drs.fits.gz'
    run = RawData( data_file_name, calib_file_name , return_dict=True)

    for event in run:
        print 'ev ', event['event_id'], 'data[0,0] = ', event['acal_data'][0,0], 'start_cell[0] = ', event['start_cells'][0], 'trigger type = ', event['trigger_type']
        if run.event_id == nevents:
            break
952
if __name__ == '__main__':
    # NOTE(review): this bare string literal is a no-op statement, not a
    # docstring; kept as in the original
    """ tests """
    import sys
    if len(sys.argv) == 1:
        # no arguments: run the RawData iterator smoke test
        print 'showing test of iterator of RawData class'
        print 'in order to test the SlowData classe please use:', sys.argv[0], 'fits-file-name'
        _test_iter(10)


    else:
        print 'showing test of SlowData class'
        print 'in case you wanted to test the RawData class, please give no commandline arguments'
        # NOTE(review): 'file' shadows the Python 2 builtin of the same name;
        # the instance is created for interactive use, see _test_SlowData's help text
        file = SlowData(sys.argv[1])
        _test_SlowData(sys.argv[1])
Note: See TracBrowser for help on using the repository browser.