source: fact/tools/pyscripts/pyfact/pyfact.py@ 17690

Last change on this file since 17690 was 17690, checked in by dneise, 11 years ago
reverting back to copied headers, because SVN externals do not yet work :-(
  • Property svn:executable set to *
File size: 36.3 KB
Line 
1#!/usr/bin/python -tt
2#
3# Werner Lustermann, Dominik Neise
4# ETH Zurich, TU Dortmund
5#
6from ctypes import *
7import numpy as np
8import pprint # for SlowData
9from scipy import signal
10
11# get the ROOT stuff + my shared libs
12from ROOT import gSystem
13# factfits_h.so is made from factfits.h and is used to access the data
14# make sure the location of factfits_h.so is in LD_LIBRARY_PATH.
15# having it in PYTHONPATH is *not* sufficient
16gSystem.Load('fits_h.so')
17gSystem.Load('izstream_h.so')
18gSystem.Load('zfits_h.so')
19gSystem.Load('factfits_h.so')
20gSystem.Load('calfactfits_h.so')
21from ROOT import *
22
class RawDataFeeder( object ):
    """ Wrapper class for RawData class
    capable of iterating over multiple RawData Files

    Presents all events of a list of (data, calibration) file pairs as one
    continuous iteration.
    """

    def __init__(self, filelist):
        """ -constructor-

        *filelist* : list of files to iterate over.
            The list should contain tuples, or sublists of two filenames:
            the first should be a data file (\*.fits.gz),
            the second should be an amplitude calibration file (\*.drs.fits.gz).

        raises TypeError if *filelist* is not a list of 2-element path pairs.
        """

        self.__module__ = 'pyfact'

        # sanity check for input
        if type(filelist) != type(list()):
            raise TypeError('filelist should be a list')
        for entry in filelist:
            if len(entry) != 2:
                raise TypeError('the entries of filelist should have length == 2')
            for path in entry:
                if type(path) != type(str()):
                    raise TypeError('the entries of filelist should be path, i.e. of type str()')
                #todo check if 'path' is a valid path
                # else: throw an Exception, or Warning?

        # BUGFIX: keep a private copy, so we never mutate the caller's list.
        # (the original implementation consumed the very list object it was given)
        self.filelist = list(filelist)
        self._current_RawData = RawData(self.filelist[0][0], self.filelist[0][1], return_dict=True)
        del self.filelist[0]

    def __iter__(self):
        return self

    def next(self):
        """ Method being called by the iterator.
        Since the RawData Objects are simply looped over, the event_id from the
        RawData object will not be unique.
        Each RawData object will start with event_id = 1 as usual.

        BUGFIX: this method was missing its *self* parameter and referenced
        the (out of scope) name 'filelist' instead of self.filelist.
        """
        try:
            return self._current_RawData.next()
        except StopIteration:
            # current_RawData was completely processed
            # delete it (I hope this calls the destructor of the fits file and/or closes it)
            del self._current_RawData
            # and remake it, if possible
            if len(self.filelist) > 0:
                self._current_RawData = RawData(self.filelist[0][0], self.filelist[0][1], return_dict=True)
                del self.filelist[0]
            else:
                raise

    # Python 3 iterator protocol alias (harmless under Python 2)
    __next__ = next
74
75
76
class RawData( object ):
    """ raw data access and calibration

    class is **iterable**

    - open raw data file and drs calibration file
    - performs amplitude calibration
    - performs baseline subtraction if wanted
    - provides all data in an array:
      row = number of pixel
      col = length of region of interest
    """


    def __init__(self, data_file_name, calib_file_name,
                 baseline_file_name='',
                 return_dict = True,
                 use_CalFactFits = True,
                 do_calibration = True,
                 user_action_calib=lambda acal_data, data, blm, tom, gm, scells, nroi: None):
        """ -constructor-

        - open data file and calibration data file
        - get basic information about the data in data_file_name
        - allocate buffers for data access

        *data_file_name* : fits or fits.gz file of the data including the path

        *calib_file_name* : fits or fits.gz file containing DRS calibration data

        *baseline_file_name* : npy file containing the baseline values

        *return_dict* : this option will be removed in future releases.
        Formerly the next() method returned only a subset of (important) event information,
        and it was not transparent how to retrieve the other (less important) information.
        Nowadays next() returns self.__dict__ which contains everything we were able to find in the fits file.

        *use_CalFactFits* : formerly the DRS amplitude calibration was
        implemented in python. But for performance reasons this was moved into
        a C++ class called CalFactFits. For test purposes, this option can be set to
        False, but this is not really maintained anymore. If the DRS calibration algorithm is
        being updated in C++ it may not be updated in the python implementation.

        *do_calibration* : In case *use_CalFactFits* is False, one may choose
        not to calibrate the data at all, thus saving quite some time.
        This is imho only needed in case one is interested in learning something about the
        calibration algorithm itself.

        *user_action_calib* : callback function, intended for tests of the DRS calibration algorithm.
        But since this is not done in the Python regime anymore, this function is never called.
        (depending on *use_CalFactFits* of course)
        """
        self.__module__='pyfact'
        # manual implementation of default value, but I need to find out
        # if the user of this class is aware of the new option
        if return_dict == False:
            print 'DEPRECATION WARNING:'
            print 'you are using RawData in a way, which is nor supported anymore.'
            print '    Please set: return_dict = True, in the __init__ call'
        self.return_dict = return_dict
        self.use_CalFactFits = use_CalFactFits

        self.do_calibration = do_calibration

        self.data_file_name = data_file_name
        self.calib_file_name = calib_file_name
        self.baseline_file_name = baseline_file_name

        self.user_action_calib = user_action_calib

        # baseline correction: True / False
        # an empty baseline file name means: no baseline correction
        if len(baseline_file_name) == 0:
            self.correct_baseline = False
        else:
            self.correct_baseline = True


        # access data file
        if use_CalFactFits:
            try:
                data_file = CalFactFits(data_file_name, calib_file_name)
            except IOError:
                print 'problem accessing data file: ', data_file_name
                raise # stop ! no data

            #: either CalFactFits object or FactFits object, depending on *use_CalFactFits*
            self.data_file = data_file
            #: 1440x300 nparray containing the event data. pixel sorted according to CHID
            self.data = np.empty( data_file.npix * data_file.nroi, np.float64)
            # register the flat buffer with the C++ side *before* reshaping;
            # the reshape below returns a view onto the same memory
            data_file.SetNpcaldataPtr(self.data)
            self.data = self.data.reshape( data_file.npix, data_file.nroi )
            #: copy of data. here for historical reasons
            self.acal_data = self.data
            #: region of interest. (number of DRS slices read).
            # for FACT data mostly 300. for special runs sometimes 1024.
            self.nroi = data_file.nroi
            #: number of Pixel in FACT. should be 1440
            self.npix = data_file.npix
            #: the total number of events in the data_file
            self.nevents = data_file.nevents

            # Data per event
            #: starting at 1
            self.event_id = None

            #: data=4 ; the rest I don't know by heart .. should be documented here :-)
            self.trigger_type = None
            #self.start_cells = None
            #self.board_times = None
            #: slice where drs readout started for all DRS chips (160) .. but enlarged to the size of 1440 pixel. thus there are always 9 equal numbers inside.
            self.start_cells = np.zeros( self.npix, np.int16 )
            #: each FAD has an onboard clock running from startup time. Currently I don't know the time unit. However this is an array of 40 times, since we have 40 boards.
            self.board_times = np.zeros( 40, np.int32 )
            # (seconds, microseconds) pair; combined into self.unixtime in next()
            self._unixtime_tuple = np.zeros( 2, np.int32 )
            self.unixtime = None

            # data_file is a CalFactFits object
            # data_file.datafile is one of the two FactFits objects hold by a CalFactFits.
            # sorry for the strange naming ..
            data_file.datafile.SetPtrAddress('StartCellData', self.start_cells)
            data_file.datafile.SetPtrAddress('BoardTime', self.board_times)
            data_file.datafile.SetPtrAddress('UnixTimeUTC', self._unixtime_tuple)


        else:
            # legacy path: raw FactFits access + calibration done in Python
            try:
                data_file = factfits(self.data_file_name)
            except IOError:
                print 'problem accessing data file: ', data_file_name
                raise # stop ! no data

            self.data_file = data_file

            # get basic information about the data file
            self.nroi = data_file.GetUInt('NROI')
            self.npix = data_file.GetUInt('NPIX')
            self.nevents = data_file.GetNumRows()

            # allocate the data memories
            self.event_id = c_ulong()
            self.trigger_type = c_ushort()
            self.data = np.zeros( self.npix * self.nroi, np.int16 ).reshape(self.npix ,self.nroi)
            self.start_cells = np.zeros( self.npix, np.int16 )
            self.board_times = np.zeros( 40, np.int32 )
            self._unixtime_tuple = np.zeros(2, np.int32 )

            # set the pointers to the data++
            # NOTE: the fits reader writes directly into these buffers on GetNextRow()
            data_file.SetPtrAddress('EventNum', self.event_id)
            data_file.SetPtrAddress('TriggerType', self.trigger_type)
            data_file.SetPtrAddress('StartCellData', self.start_cells)
            data_file.SetPtrAddress('Data', self.data)
            data_file.SetPtrAddress('BoardTime', self.board_times)
            data_file.SetPtrAddress('UnixTimeUTC', self._unixtime_tuple)

            # open the calibration file
            try:
                calib_file = factfits(self.calib_file_name)
            except IOError:
                print 'problem accessing calibration file: ', calib_file_name
                raise
            #: drs calibration file
            self.calib_file = calib_file

            # total number of calibration constants per quantity
            baseline_mean = calib_file.GetN('BaselineMean')
            gain_mean = calib_file.GetN('GainMean')
            trigger_offset_mean = calib_file.GetN('TriggerOffsetMean')

            # number of calibration constants per pixel
            # (Python 2 integer division; exact as long as N is a multiple of npix)
            self.Nblm = baseline_mean / self.npix
            self.Ngm = gain_mean / self.npix
            self.Ntom = trigger_offset_mean / self.npix

            self.blm = np.zeros(baseline_mean, np.float32).reshape(self.npix , self.Nblm)
            self.gm = np.zeros(gain_mean, np.float32).reshape(self.npix , self.Ngm)
            self.tom = np.zeros(trigger_offset_mean, np.float32).reshape(self.npix , self.Ntom)

            calib_file.SetPtrAddress('BaselineMean', self.blm)
            calib_file.SetPtrAddress('GainMean', self.gm)
            calib_file.SetPtrAddress('TriggerOffsetMean', self.tom)
            # read the single calibration row into the buffers registered above
            calib_file.GetRow(0)

            # make calibration constants double, so we never need to roll
            self.blm = np.hstack((self.blm, self.blm))
            self.gm = np.hstack((self.gm, self.gm))
            self.tom = np.hstack((self.tom, self.tom))

        self.v_bsl = np.zeros(self.npix) # array of baseline values (all ZERO)

    def __iter__(self):
        """ iterator """
        return self

    def next(self):
        """ used by __iter__

        returns self.__dict__ (or a 3-tuple if return_dict is False)
        raises StopIteration when the file is exhausted
        """
        if self.use_CalFactFits:
            if self.data_file.GetCalEvent() == False:
                raise StopIteration
            else:
                self.event_id = self.data_file.event_id
                self.trigger_type = self.data_file.event_triggertype
                #self.start_cells = self.data_file.event_offset
                #self.board_times = self.data_file.event_boardtimes
                #self.acal_data = self.data.copy().reshape(self.data_file.npix, self.data_file.nroi)

                # combine (seconds, microseconds) into a float unix timestamp
                self.unixtime = self._unixtime_tuple[0] + self._unixtime_tuple[1]/1.e6

        else:
            if self.data_file.GetNextRow() == False:
                raise StopIteration
            else:
                if self.do_calibration == True:
                    self.calibrate_drs_amplitude()

        #print 'nevents = ', self.nevents, 'event_id = ', self.event_id.value
        if self.return_dict:
            return self.__dict__
        else:
            return self.acal_data, self.start_cells, self.trigger_type.value

    def next_event(self):
        """ ---- DEPRECATED ----

        load the next event from disk and calibrate it
        """
        if self.use_CalFactFits:
            self.data_file.GetCalEvent()
        else:
            self.data_file.GetNextRow()
            self.calibrate_drs_amplitude()

    def calibrate_drs_amplitude(self):
        """ --- DEPRECATED ---

        since the DRS calibration is done by the C++ class CalFactFits

        perform the drs amplitude calibration of the event data
        """
        # shortcuts
        blm = self.blm
        gm = self.gm
        tom = self.tom

        to_mV = 2000./4096.
        #: 2D array with amplitude calibrated dat in mV
        acal_data = self.data * to_mV # convert ADC counts to mV


        for pixel in range( self.npix ):
            #shortcuts
            sc = self.start_cells[pixel]
            roi = self.nroi
            # rotate the pixel baseline mean to the Data startCell
            # (blm/gm were doubled in __init__, so sc+roi never runs off the end)
            acal_data[pixel,:] -= blm[pixel,sc:sc+roi]
            # the 'trigger offset mean' does not need to be rolled
            # on the contrary, it seems there is an offset in the DRS data,
            # which is related to its distance to the startCell, not to its
            # distance to the beginning of the physical pipeline in the DRS chip
            acal_data[pixel,:] -= tom[pixel,0:roi]
            # rotate the pixel gain mean to the Data startCell
            acal_data[pixel,:] /= gm[pixel,sc:sc+roi]


        # NOTE(review): 1907.35 looks like a fixed gain scale factor applied
        # after calibration -- unit/origin not documented here, TODO confirm
        self.acal_data = acal_data * 1907.35

        self.user_action_calib( self.acal_data,
            np.reshape(self.data, (self.npix, self.nroi) ), blm, tom, gm, self.start_cells, self.nroi)


    def baseline_read_values(self, file, bsl_hist='bsl_sum/hplt_mean'):
        """
        open ROOT file with baseline histogram and read baseline values

        *file* : name of the root file

        *bsl_hist* : path to the histogram containing the baseline values
        """

        try:
            f = TFile(file)
        except:
            print 'Baseline data file could not be read: ', file
            return

        h = f.Get(bsl_hist)

        # ROOT histogram bins are 1-based, hence i+1
        for i in range(self.npix):
            self.v_bsl[i] = h.GetBinContent(i+1)

        f.Close()

    def baseline_correct(self):
        """ subtract baseline from the data

        DN 08.06.2011: I didn't use this function at all so far... don't know how well it works.
        """

        for pixel in range(self.npix):
            self.acal_data[pixel,:] -= self.v_bsl[pixel]

    def info(self):
        """ print run information

        not very well implemented ... we need more info here.
        """
        print 'data file: ', self.data_file_name
        print 'calib file: ', self.calib_file_name
        print '... we need more information printed here ... '
387
388# -----------------------------------------------------------------------------
class RawDataFake( object ):
    """ raw data FAKE access similar to real RawData access

    Generates synthetic sinusoidal events instead of reading a file.
    (Was marked 'DO NOT USE ... its not working'; the NameErrors that made
    it unusable are fixed below.)
    """


    def __init__(self, data_file_name, calib_file_name,
                 user_action_calib=lambda acal_data, data, blm, tom, gm, scells, nroi: None,
                 baseline_file_name=''):
        """ -constructor-

        The file-name arguments are only stored for info(); no file is opened.
        """
        self.__module__ = 'pyfact'

        # BUGFIX: the constructor arguments used to be silently dropped,
        # which made info() fail with a NameError
        self.data_file_name = data_file_name
        self.calib_file_name = calib_file_name
        self.baseline_file_name = baseline_file_name
        self.user_action_calib = user_action_calib

        self.nroi = 300
        self.npix = 9
        self.nevents = 1000

        self.simulator = None

        # sampling intervals; cumsum()'ed into sample times in _make_event_data
        self.time = np.ones(1024) * 0.5


        self.event_id = c_ulong(0)
        self.trigger_type = c_ushort(4)
        self.data = np.zeros( self.npix * self.nroi, np.int16 ).reshape(self.npix ,self.nroi)
        self.start_cells = np.zeros( self.npix, np.int16 )
        self.board_times = np.zeros( 40, np.int32 )

    def __iter__(self):
        """ iterator """
        return self

    def next(self):
        """ used by __iter__

        returns self.__dict__ with a freshly simulated event,
        raises StopIteration after self.nevents events
        """
        self.event_id = c_ulong(self.event_id.value + 1)
        self.board_times = self.board_times + 42

        if self.event_id.value >= self.nevents:
            raise StopIteration
        else:
            self._make_event_data()

        return self.__dict__

    # Python 3 iterator protocol alias (harmless under Python 2)
    __next__ = next

    def _make_event_data(self):
        """ simulate one event and store it in self.data """
        # BUGFIX: was 'time[0]' (unqualified name -> NameError)
        sample_times = self.time.cumsum() - self.time[0]

        # random start cell, identical for all pixels
        self.start_cells = np.ones( self.npix, np.int16 ) * np.random.randint(0,1024)

        starttime = self.start_cells[0]

        pulse = self._std_sinus_simu(sample_times, starttime)

        # BUGFIX: the original vstack loop produced 10 rows although npix is 9;
        # build exactly one identical trace per pixel instead
        self.data = np.tile(pulse, (self.npix, 1))

    def _std_sinus_simu(self, times, starttime):
        """ return a noisy sine trace sampled at *times*

        *starttime* enters as a phase offset; it is jittered with
        a gaussian of sigma 0.05 before use.
        """
        period = 10 # in ns

        # give a jitter on starttime
        # BUGFIX: was 'startime' (typo -> NameError)
        starttime = np.random.normal(starttime, 0.05)

        phase = 0.0
        signal = 10 * np.sin(times * 2*np.pi/period + starttime + phase)

        # add some noise
        noise = np.random.normal(0.0, 0.5, signal.shape)
        signal += noise
        return signal

    def info(self):
        """ print run information

        BUGFIX: used to reference undefined names (data_file_name) and
        attributes this class never had (Nblm, Ngm, Ntom).
        """
        # single-argument print(...) works identically under Python 2 and 3
        print('data file:  %s' % self.data_file_name)
        print('calib file:  %s' % self.calib_file_name)
        print('nroi: %d  npix: %d  nevents: %d' % (self.nroi, self.npix, self.nevents))
472
473# -----------------------------------------------------------------------------
474import ctypes
475
class SlowData( object ):
    """ -Fact SlowData File-

    A Python wrapper for the fits-class implemented in factfits.h
    provides easy access to the fits file meta data.

    * dictionary of file metadata - self.meta
    * dict of table metadata - self.columns
    * variable table column access, thus possibly increased speed while looping
    """
    def __del__(self):
        # drop the underlying fits object; relies on its destructor to close the file
        del self.f

    def __init__(self, path):
        """ creates meta and columns dictionaries

        *path* : path to the slow-data fits file

        raises IOError if *path* does not exist or cannot be opened.
        """
        import os

        if not os.path.exists(path):
            raise IOError(path+' was not found')
        self.path = path
        self.__module__ = 'pyfact'
        try:
            self.f = factfits(path)
        except IOError:
            # NOTE(review): 'data_file_name' is undefined in this scope;
            # this print would itself raise a NameError -- should be 'path'
            print 'problem accessing data file: ', data_file_name
            raise # stop ! no data

        self.meta = self._make_meta_dict()
        self.columns = self._make_columns_dict()

        self._treat_meta_dict()


        # list of columns, which are already registered
        # see method register()
        self._registered_cols = []
        # dict of column data; keeps the raw ctypes/numpy buffers so that
        # next() can unwrap scalar ctypes values for the user
        self._table_cols = {}

        # I need to count the rows, since the normal loop mechanism seems not to work.
        self._current_row = 0

        # column name -> accumulated rows, filled by stack()/next()
        self.stacked_cols = {}

    def _make_meta_dict(self):
        """ This method retrieves meta information about the fits file and
        stores this information in a dict
        return: dict
            key: string - all capital letters
            value: tuple( numerical value, string comment)
        """
        # abbreviation
        f = self.f

        # intermediate variables for file metadata dict generation

        keys=f.GetPy_KeyKeys()
        values=f.GetPy_KeyValues()
        comments=f.GetPy_KeyComments()
        types=f.GetPy_KeyTypes()

        # the four lists are parallel arrays; refuse to continue on mismatch
        if len(keys) != len(values):
            raise TypeError('len(keys)',len(keys),' != len(values)', len(values))
        if len(keys) != len(types):
            raise TypeError('len(keys)',len(keys),' != len(types)', len(types))
        if len(keys) != len(comments):
            raise TypeError('len(keys)',len(keys),' != len(comments)', len(comments))

        meta_dict = {}
        for i in range(len(keys)):
            # NOTE(review): 'type' shadows the builtin inside this loop
            type = types[i]
            # convert the string representation according to the fits type code:
            # I = int, F = float, B = bool ('T'/'F'), T = text
            if type == 'I':
                value = int(values[i])
            elif type == 'F':
                value = float(values[i])
            elif type == 'B':
                if values[i] == 'T':
                    value = True
                elif values[i] == 'F':
                    value = False
                else:
                    raise TypeError("meta-type is 'B', but meta-value is neither 'T' nor 'F'. meta-value:",values[i])
            elif type == 'T':
                value = values[i]
            else:
                raise TypeError("unknown meta-type: known meta types are: I,F,B and T. meta-type:",type)
            meta_dict[keys[i]]=(value, comments[i])
        return meta_dict


    def _make_columns_dict(self):
        """ This method retrieves information about the columns
        stored inside the fits files internal binary table.
        returns: dict
            key: string column name -- all capital letters
            values: tuple(
                number of elements in table field - integer
                size of element in bytes -- this is not really interesting for any user
                    might be omitted in future versions
                type - a single character code -- should be translated into
                    a comprehensible word
                unit - string like 'mV' or 'ADC count'
        """
        # abbreviation
        f = self.f

        # intermediate variables for file table-metadata dict generation
        keys=f.GetPy_ColumnKeys()
        #offsets=self.GetPy_ColumnOffsets() #not needed on python level...
        nums=f.GetPy_ColumnNums()
        sizes=f.GetPy_ColumnSizes()
        types=f.GetPy_ColumnTypes()
        units=f.GetPy_ColumnUnits()

        # zip the values
        values = zip(nums,sizes,types,units)
        # create the columns dictionary
        columns = dict(zip(keys ,values))
        return columns

    def stack(self, on=True):
        """ start accumulating the registered columns row by row

        After this call, next() appends each row's values to
        self.stacked_cols[col] via np.vstack.
        NOTE(review): the *on* parameter is currently unused.
        """
        self.next()
        for col in self._registered_cols:
            if isinstance( self.dict[col], type(np.array('')) ):
                self.stacked_cols[col] = self.dict[col]
            else:
#           elif isinstance(self.dict[col], ctypes._SimpleCData):
                self.stacked_cols[col] = np.array(self.dict[col])
#           else:
#               raise TypeError("I don't know how to stack "+col+". It is of type: "+str(type(self.dict[col])))

    def register(self, col_name):
        """ register for a column in the fits file

        after the call, this SlowData object will have a new member variable
        self.col_name, if col_name is a key in self.columns

        the value will be updated after each call of next(), or while iterating over self.
        NB: the initial value is zero(s)

        *col_name* : name of a key in self.columns, or 'all' to choose all.

        raises KeyError if *col_name* is not a column of the binary table.
        """
        columns = self.columns
        if col_name.lower() == 'all':
            for col in columns:
                self._register(col)
        else:
            #check if colname is in columns:
            if col_name not in columns:
                error_msg = 'colname:'+ col_name +' is not a column in the binary table.\n'
                error_msg+= 'possible colnames are\n'
                for key in columns:
                    error_msg += key+' '
                raise KeyError(error_msg)
            else:
                self._register(col_name)

    # 'private' method, do not use
    def _register( self, colname):
        """ allocate a buffer for *colname*, hand it to the fits reader,
        and expose it as self.<colname> / self.dict[colname] """

        columns = self.columns
        f = self.f
        local = None

        number_of_elements = int(columns[colname][0])
        size_of_elements_in_bytes = int(columns[colname][1])
        ctypecode_of_elements = columns[colname][2]
        physical_unit_of_elements = columns[colname][3]

        # snippet from the C++ source code, or header file to be precise:
        #case 'L': gLog << "bool(8)"; break;
        #case 'B': gLog << "byte(8)"; break;
        #case 'I': gLog << "short(16)"; break;
        #case 'J': gLog << "int(32)"; break;
        #case 'K': gLog << "int(64)"; break;
        #case 'E': gLog << "float(32)"; break;
        #case 'D': gLog << "double(64)"; break;



        # the fields inside the columns can either contain single numbers,
        # or whole arrays of numbers as well.
        # we treat single elements differently...
        if number_of_elements == 0:
            return
        if number_of_elements == 1:
            # allocate some memory for a single number according to its type
            # (un_c_type records the matching plain-python type; currently unused)
            if ctypecode_of_elements == 'J': # J is for a 4byte int, i.e. an unsigned long
                local = ctypes.c_ulong()
                un_c_type = long
            elif ctypecode_of_elements == 'I': # I is for a 2byte int, i.e. an unsinged int
                local = ctypes.c_ushort()
                un_c_type = int
            elif ctypecode_of_elements == 'B': # B is for a byte
                local = ctypes.c_ubyte()
                un_c_type = int
            elif ctypecode_of_elements == 'D':
                local = ctypes.c_double()
                un_c_type = float
            elif ctypecode_of_elements == 'E':
                local = ctypes.c_float()
                un_c_type = float
            elif ctypecode_of_elements == 'A':
                local = ctypes.c_uchar()
                un_c_type = chr
            elif ctypecode_of_elements == 'K':
                local = ctypes.c_ulonglong()
                un_c_type = long
            else:
                raise TypeError('unknown ctypecode_of_elements:',ctypecode_of_elements)
        else:
            # array-valued field: allocate a numpy buffer of the matching dtype
            if ctypecode_of_elements == 'B': # B is for a byte
                nptype = np.int8
            elif ctypecode_of_elements == 'A': # A is for a char .. but I don't know how to handle it
                nptype = np.int8
            elif ctypecode_of_elements == 'I': # I is for a 2byte int
                nptype = np.int16
            elif ctypecode_of_elements == 'J': # J is for a 4byte int
                nptype = np.int32
            elif ctypecode_of_elements == 'K': # K is for an 8byte int
                nptype = np.int64
            elif ctypecode_of_elements == 'E': # E is for a 4byte float
                nptype = np.float32
            elif ctypecode_of_elements == 'D': # D is for an 8byte float
                nptype = np.float64
            else:
                raise TypeError('unknown ctypecode_of_elements:',ctypecode_of_elements)
            local = np.zeros( number_of_elements, nptype)

        # Set the Pointer Address
        try:
            f.SetPtrAddress(colname, local)
        except TypeError:
            print 'something was wrong with SetPtrAddress()'
            print 'Type of colname', type(colname)
            print 'colname:', colname
            print 'Type of local', type(local)
            print 'length of local', len(local)
            print 'local should be alle zeros, since "local = np.zeros( number_of_elements, nptype)" '
            raise

        self._table_cols[colname] = local
        if number_of_elements > 1:
            self.__dict__[colname] = local
            self.dict[colname] = local
        else:
            # remove any traces of ctypes:
            self.__dict__[colname] = local.value
            self.dict[colname] = local.value
        self._registered_cols.append(colname)


    def _treat_meta_dict(self):
        """make 'interesting' meta information available like normal members.
        non interesting are:
        TFORM, TUNIT, and TTYPE
        since these are available via the columns dict.
        """

        self.number_of_rows = self.meta['NAXIS2'][0]
        self.number_of_columns = self.meta['TFIELDS'][0]

        # there are some information in the meta dict, which are always there:
        # they are regarded as not interesting:
        uninteresting_meta = {}
        uninteresting_meta['arraylike'] = {}
        uninteresting = ['NAXIS', 'NAXIS1', 'NAXIS2',
                         'TFIELDS',
                         'XTENSION','EXTNAME','EXTREL',
                         'BITPIX', 'PCOUNT', 'GCOUNT',
                         'ORIGIN',
                         'PACKAGE', 'COMPILED', 'CREATOR',
                         'TELESCOP','TIMESYS','TIMEUNIT','VERSION']
        for key in uninteresting:
            if key in self.meta:
                uninteresting_meta[key]=self.meta[key]
                del self.meta[key]

        # the table meta data contains


        # shortcut to access the meta dict. But this needs to
        # be cleaned up quickly!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        meta = self.meta

        # loop over keys:
        # * try to find array-like keys
        # keys ending in digits (e.g. 'FOO1', 'FOO2') are treated as one
        # array-like family under the stripped name 'FOO'
        arraylike = {}
        singlelike = []
        for key in self.meta:
            stripped = key.rstrip('1234567890')
            if stripped == key:
                singlelike.append(key)
            else:
                if stripped not in arraylike:
                    arraylike[stripped] = 0
                else:
                    arraylike[stripped] += 1
        newmeta = {}
        for key in singlelike:
            newmeta[key.lower()] = meta[key]
        for key in arraylike:
            uninteresting_meta['arraylike'][key.lower()] = []
            for i in range(arraylike[key]+1):
                if key+str(i) in meta:
                    uninteresting_meta['arraylike'][key.lower()].append(meta[key+str(i)])
        self.ui_meta = uninteresting_meta
        # make newmeta self
        for key in newmeta:
            self.__dict__[key]=newmeta[key]

        # self.dict is a snapshot of __dict__ used by show()/next();
        # meta and ui_meta are excluded to keep it readable
        dict = self.__dict__.copy()
        del dict['meta']
        del dict['ui_meta']
        self.dict = dict

    def __iter__(self):
        """ iterator """
        return self

    def next(self):
        """ use to iterate over the file

        do not forget to call register() before iterating over the file
        call show() in order to find out, what parameters register() accepts.
        or just call register('all') in case you are unsure.

        returns self
        raises StopIteration at the end of the table
        """
        # abbreviaition
        f = self.f

        # Here one might check, if looping makes any sense, and if not
        # one could stop looping or so...
        # like this:
        #
        # if len(self._registered_cols) == 0:
        #   print 'warning: looping without any registered columns'
        if self._current_row < self.number_of_rows:
            if f.GetNextRow() == False:
                raise StopIteration
            # unwrap scalar ctypes buffers into plain python values;
            # numpy array buffers are updated in place by GetNextRow()
            for col in self._registered_cols:
                if isinstance(self._table_cols[col], ctypes._SimpleCData):
                    self.__dict__[col] = self._table_cols[col].value
                    self.dict[col] = self._table_cols[col].value

            # append this row to any stacked columns (see stack())
            for col in self.stacked_cols:
                if isinstance(self.dict[col], type(np.array(''))):
                    self.stacked_cols[col] = np.vstack( (self.stacked_cols[col],self.dict[col]) )
                else:
                    self.stacked_cols[col] = np.vstack( (self.stacked_cols[col],np.array(self.dict[col])) )
            self._current_row += 1
        else:
            raise StopIteration
        return self

    def show(self):
        """ pretty-print the current contents of self.dict
        (metadata members plus the current values of registered columns)
        """
        pprint.pprint(self.dict)
838
839
840
841
class fnames( object ):
    """ organize file names of a FACT data run

    Builds the full paths of the raw data file, the drs calibration file
    and the results-file radical from a run specifier.
    """

    def __init__(self, specifier=None,
                 rpath='/scratch_nfs/res/bsl/',
                 zipped=True):
        """ -constructor-

        *specifier* : list of five strings defined as:
            [ 'DRS calibration file', 'Data file', 'YYYY', 'MM', 'DD']
            defaults to ['012', '023', '2011', '11', '24'] when omitted.

        *rpath* : directory path for the results; YYYYMMDD will be appended to rpath

        *zipped* : use zipped (True) or unzipped (False) raw files
        """
        # BUGFIX: the default used to be a mutable default argument
        # (one shared list object); use a None sentinel instead
        if specifier is None:
            specifier = ['012', '023', '2011', '11', '24']

        self.specifier = specifier
        self.rpath = rpath
        self.zipped = zipped

        self.make( self.specifier, self.rpath, self.zipped )


    def make( self, specifier, rpath, zipped ):
        """ create (make) the filenames

        fills self.names, a dictionary with tags { 'data', 'drscal', 'results' }
            data    : name of the data file
            drscal  : name of the drs calibration file
            results : radical of file name(s) for results (to be completed by suffixes)
        and exposes them as self.data, self.drscal, self.results as well.
        """

        self.specifier = specifier

        # zipped and unzipped raw data live on different disks
        if zipped:
            dpath = '/data00/fact-construction/raw/'
            ext = '.fits.gz'
        else:
            dpath = '/data03/fact-construction/raw/'
            ext = '.fits'

        year = specifier[2]
        month = specifier[3]
        day = specifier[4]

        yyyymmdd = year + month + day
        dfile = specifier[1]  # data run number
        cfile = specifier[0]  # drs calibration run number

        # results go into a per-day subdirectory of rpath
        rpath = rpath + yyyymmdd + '/'
        self.rpath = rpath
        self.names = {}

        tmp = dpath + year + '/' + month + '/' + day + '/' + yyyymmdd + '_'
        self.names['data'] = tmp + dfile + ext
        self.names['drscal'] = tmp + cfile + '.drs' + ext
        self.names['results'] = rpath + yyyymmdd + '_' + dfile + '_' + cfile

        self.data = self.names['data']
        self.drscal = self.names['drscal']
        self.results = self.names['results']

    def info( self ):
        """ print complete filenames

        (single-argument print(...) keeps the output identical under
        Python 2 and Python 3)
        """

        print('file names:')
        print('data:  %s' % self.names['data'])
        print('drs-cal:  %s' % self.names['drscal'])
        print('results:  %s' % self.names['results'])

# end of class definition: fnames( object )
914
915# end of class definition: fnames( object )
916
def _test_SlowData( filename ):
    """ print interactive usage instructions for a SlowData object

    *filename* : path of the already-opened slow-data fits file,
    only echoed in the help text; this function opens nothing itself.
    """
    print '-'*70
    print "opened :", filename, " as 'file'"
    print
    print '-'*70
    print 'type file.show() to look at its contents'
    print "type file.register( columnname ) or file.register('all') in order to register columns"
    print
    print " due column-registration you declare, that you would like to retrieve the contents of one of the columns"
    print " after column-registration, the 'file' has new member variables, they are named like the columns"
    print " PLEASE NOTE: immediatly after registration, the members exist, but they are empty."
    print " the values are assigned only, when you call file.next() or when you loop over the 'file'"
    print
    print "in order to loop over it, just go like this:"
    print "for row in file:"
    print "    print row.columnname_one, row.columnname_two"
    print
    print ""
    print '-'*70
936
937
938
def _test_iter( nevents ):
    """ test for function __iter__

    iterates a RawData object built from two hard-coded example files
    and prints one line per event; stops after *nevents* events.
    Needs the example files to exist on disk.
    """

    data_file_name = '/fact/raw/2011/11/24/20111124_117.fits.gz'
    calib_file_name = '/fact/raw/2011/11/24/20111124_114.drs.fits.gz'
    print 'the files for this test are:'
    print 'data file:', data_file_name
    print 'calib file:', calib_file_name
#    data_file_name = '/home/luster/win7/FACT/data/raw/20120114/20120114_028.fits.gz'
#    calib_file_name = '/home/luster/win7/FACT/data/raw/20120114/20120114_022.drs.fits.gz'
    run = RawData( data_file_name, calib_file_name , return_dict=True)

    for event in run:
        print 'ev ', event['event_id'], 'data[0,0] = ', event['acal_data'][0,0], 'start_cell[0] = ', event['start_cells'][0], 'trigger type = ', event['trigger_type']
        if run.event_id == nevents:
            break
955
if __name__ == '__main__':
    """ tests """
    # no argument: exercise the RawData iterator on hard-coded example files;
    # one argument: open it as a SlowData file and print usage help
    import sys
    if len(sys.argv) == 1:
        print 'showing test of iterator of RawData class'
        print 'in order to test the SlowData classe please use:', sys.argv[0], 'fits-file-name'
        _test_iter(10)


    else:
        print 'showing test of SlowData class'
        print 'in case you wanted to test the RawData class, please give no commandline arguments'
        # NOTE(review): 'file' shadows the builtin; kept for the interactive
        # help text in _test_SlowData which refers to it by this name
        file = SlowData(sys.argv[1])
        _test_SlowData(sys.argv[1])
Note: See TracBrowser for help on using the repository browser.