#!/usr/bin/python -tt
#
# Dark-count peak finder setup: read a FACT raw-data run plus its DRS
# calibration file, and prepare an HDF5 output file with provenance metadata.
from pyfact import RawData
from drs_spikes import DRSSpikes
from fir_filter import CFD
from fir_filter import SlidingAverage
import os.path
import sys
import numpy as np

try:
    import h5py
except ImportError:
    # h5py is the only hard requirement beyond the project modules;
    # fail early with a clear message instead of a raw traceback.
    print('module h5py needed for this process ... aborting')
    sys.exit(1)

data_filename = 'data/20111017_010.fits.gz'
calib_filename = 'data/20111017_006.drs.fits.gz'

run = RawData(data_filename, calib_filename,
              return_dict=True, do_calibration=True)

despike = DRSSpikes()
smooth = SlidingAverage(7)
cfd = CFD()

thr = 3                  # amplitude threshold (smoothed data) for accepting a crossing
filter_delay = 3         # SlidingAverage(7) delays the signal by (7 - 1) / 2 = 3 samples
search_window_size = 12  # local-maximum search window (samples) left of a crossing
# shortcut
sws = search_window_size

# we try to determine the:
#  * Position
#  * and Height (in two versions: filtered, and unfiltered data)
# of dark count peaks in:
#  a) all events
#  b) all pixel
#
# we limit ourselves to at most PEAKS_MAX peaks per pixel!
#
# So in order to store this stuff, we prepare an n-dim-array
PEAKS_MAX = 30

# In order not to allocate so much mem, we use hdf5 files for storing this
# information
f = h5py.File('20111017_010-006.h5', 'w')

# we write some meta data into the file, so we know afterwards, where this
# stuff came from
f.attrs['data_file'] = os.path.realpath(data_filename)
f.attrs['calib_file'] = os.path.realpath(calib_filename)
# this script itself; 'with' guarantees the file handle is closed again
with open(sys.argv[0], 'r') as script_file:
    f.attrs['source_python_script'] = script_file.read()
# SVN revision, should go here ... but of what? this file??
# NOTE: the commas between the entries are essential -- without them Python
# silently concatenates adjacent string literals, collapsing six intended
# settings entries into one long string (that was a real bug here).
f.attrs['settings'] = [
    'threshold = 3',
    'SlidingAverage = 7 --> Filter delay = 3',
    'Search Window Size = 12',
    'Maximum Number of Peaks, in data sets = 30',
    'Default value of peak_positions = -1',
    'Default value of peak_height_unfiltered = -np.inf',
    'Default value of peak_height_smoothed = -np.inf',
]

# one (nevents, npix, PEAKS_MAX) dataset per quantity; unfilled slots keep
# their fillvalue (-1 / -inf) so they are distinguishable from real peaks
result_peak_positions = f.create_dataset(
    'peak_positions',
    (run.nevents, run.npix, PEAKS_MAX),
    dtype=np.int16, fillvalue=-1, compression='gzip')
result_peak_unfiltered_height = f.create_dataset(
    'peak_height_unfiltered',
    (run.nevents, run.npix, PEAKS_MAX),
    dtype=np.float32, fillvalue=-np.inf, compression='gzip')
result_peak_smoothed_height = f.create_dataset(
    'peak_height_smoothed',
    (run.nevents, run.npix, PEAKS_MAX),
    dtype=np.float32, fillvalue=-np.inf, compression='gzip')

try:
    for event in run:
        event_id = event['event_id'].value - 1  # event ids are 1-based
        data = event['acal_data']
        data = despike(data)
        data_orig = data.copy()
        data = smooth(data)
        filtered = cfd(data)
        filtered = smooth(filtered)

        # this is a loop over all pixel of this event
        for pixel_id, (dat, fil, orig) in enumerate(zip(data, filtered,
                                                        data_orig)):
            print('%d %d' % (event_id, pixel_id))
            # zero crossings of the CFD output: sign change between
            # neighbouring samples
            prod = fil[:-1] * fil[1:]
            cand = np.where(prod <= 0)[0]
            if len(cand) == 0:
                continue
            # keep only zero crossings with rising edge
            cross = cand[np.where(fil[cand] < 0)[0]]
            if len(cross) == 0:
                continue
            # require the smoothed amplitude 4 samples earlier to be above
            # threshold.  NOTE(review): for cross < 4 the index wraps around
            # to the end of the trace -- presumably crossings never occur
            # that early; verify.
            over_thr = cross[np.where(dat[cross - 4] > thr)[0]]

            # Now since we have these values, we will throw away all those,
            # which are probably on a falling edge of its predecessor
            # (i.e. closer than 100 samples to the previous accepted peak)
            dover = np.diff(over_thr)
            if len(dover) == 0:
                good = over_thr
            else:
                good = [over_thr[0]]
                for i in range(len(dover)):
                    if dover[-i - 1] > 100:
                        good.append(over_thr[-i - 1])
                good = np.array(sorted(good))

            # these positions, we just found, do not exactly point to the
            # maximum of a peak, but the peak will be on the left side of it.
            # we use the smoothed data to find the position of the local
            # maximum and then store this position and the value of both
            # the smoothed data and the original data.
            #
            # max_pos must be an integer array: it is used as an index below
            # (a float np.zeros() array raises/deprecates on fancy indexing)
            max_pos = np.zeros(good.shape, dtype=int)
            max_smoothed = np.zeros(good.shape)
            max_orig = np.zeros(good.shape)
            for i in range(len(good)):
                # We search for a local maximum in a window of size sws
                window = dat[good[i] - sws:good[i]]
                if len(window) > 0:
                    max_pos[i] = good[i] - sws + np.argmax(window)
                    max_smoothed[i] = dat[max_pos[i]]
                    # compensate the smoothing filter delay in the raw trace
                    max_orig[i] = orig[max_pos[i] - filter_delay]

            # clamp to the dataset extent: more than PEAKS_MAX peaks would
            # otherwise make the h5py slice assignment raise
            npeaks = min(len(max_pos), PEAKS_MAX)
            result_peak_positions[event_id, pixel_id, :npeaks] = \
                max_pos[:npeaks]
            result_peak_unfiltered_height[event_id, pixel_id, :npeaks] = \
                max_orig[:npeaks]
            result_peak_smoothed_height[event_id, pixel_id, :npeaks] = \
                max_smoothed[:npeaks]
except Exception:
    print('An Exception occured .. closing file')
    raise
finally:
    # close exactly once, on success and on failure alike
    f.close()