Index: fact/tools/pyscripts/sandbox/dneise/cleaning/area_vs_size.py
===================================================================
--- fact/tools/pyscripts/sandbox/dneise/cleaning/area_vs_size.py	(revision 13415)
+++ fact/tools/pyscripts/sandbox/dneise/cleaning/area_vs_size.py	(revision 13415)
@@ -0,0 +1,81 @@
+#!/usr/bin/python -itt
+#
+# Dominik Neise
+#
+# cleaning a small step towards the truth
+from pyfact import RawData
+import os.path
+import matplotlib.pyplot as plt
+import numpy as np
+from fir_filter import *
+from extractor import *
+from drs_spikes import *
+from plotters import *
+import time as t
+from cleaners import AmplitudeCleaner
+from image_extractors import SimpleArea, SimpleSize
+confirm_next_step = False  # this is for user interaction
+
+data_file_name = 'data/20120223_212.fits.gz'
+calib_file_name = 'data/20120223_206.drs.fits.gz'
+if not os.path.isfile(data_file_name):
+    print 'not able to find file:', data_file_name
+    sys.exit(-1)
+if not os.path.isfile(calib_file_name ):
+    print 'not able to find file:', calib_file_name 
+    sys.exit(-1)
+
+run = RawData(data_file_name, calib_file_name)
+despike = DRSSpikes()
+smooth = SlidingAverage(8)
+extract = GlobalMaxFinder(40,200)
+cleaner = AmplitudeCleaner(45,18)
+area = SimpleArea()
+size = SimpleSize()
+
+#plotA = CamPlotter('amplitudes')
+#plotT = CamPlotter('times')
+#plotCA = CamPlotter('cleaned amplitudes')
+
+#plotArea = HistPlotter('area', 1440, (0,1440) )
+#plotSize = HistPlotter('size', 1000, (0,10000) )
+
+
+areas = []
+sizes = []
+for data,startcell,tt in run:
+    # trigger type 4 means 'physics event'
+    if tt==4:
+        data = despike(data)
+        data = smooth(data)
+        amplitude, time_of_max = extract(data)
+        survivors = cleaner(amplitude, return_bool_mask=False )
+        areas.append( area(survivors) )
+        sizes.append( size(survivors, amplitude) )
+
+        if confirm_next_step:
+            user_input = raw_input("'q'-quit, 'r'-run, anything else goes one step")
+            number=None
+            try:
+                number=int(user_input)
+            except:
+                number=None
+            if user_input.find('q') != -1:
+                sys.exit(0)
+            elif user_input.find('r') != -1:
+                confirm_next_step = False
+            elif number!=None:
+                run += number
+
+
+plt.ion()
+myfig = plt.figure()
+myn = myfig.number
+logsize = np.log10(np.array(sizes))
+areas = np.array(areas)
+
+plt.figure(myn)
+plt.title('area vs. log10(size) of '+ str(run.event_id.value) + 'events')
+plt.xlabel('log10(size/1mV)')
+plt.ylabel('area [#pixel]')
+plt.plot( logsize,areas, '.')
Index: fact/tools/pyscripts/sandbox/dneise/cleaning/begin.py
===================================================================
--- fact/tools/pyscripts/sandbox/dneise/cleaning/begin.py	(revision 13415)
+++ fact/tools/pyscripts/sandbox/dneise/cleaning/begin.py	(revision 13415)
@@ -0,0 +1,139 @@
+#!/usr/bin/python -itt
+#
+# Dominik Neise
+#
+# cleaning a small step towards the truth
+from pyfact import RawData
+import os.path
+import matplotlib.pyplot as plt
+import numpy as np
+from fir_filter import *
+from extractor import *
+from drs_spikes import *
+from plotters import *
+import time as t
+from coor import Coordinator
+confirm_next_step = False  # this is for user interaction
+
+data_file_name = '/media/daten_platte/FACT/data/20120229_144.fits.gz'
+calib_file_name = '/media/daten_platte/FACT/data/20120229_132.drs.fits.gz'
+if not os.path.isfile(data_file_name):
+    print 'not able to find file:', data_file_name
+    sys.exit(-1)
+if not os.path.isfile(calib_file_name ):
+    print 'not able to find file:', calib_file_name 
+    sys.exit(-1)
+
+run = RawData(data_file_name, calib_file_name)
+despike = DRSSpikes()
+smooth = SlidingAverage(8)
+extract = GlobalMaxFinder(40,200)
+
+#plotA = CamPlotter('amplitudes')
+#plotT = CamPlotter('times')
+#plotCA = CamPlotter('cleaned amplitudes')
+
+#plotArea = HistPlotter('area', 1440, (0,1440) )
+#plotSize = HistPlotter('size', 1000, (0,10000) )
+
+#funnyPlot = Plotter('area vs log(size')
+
+
+coreTHR = 45 # copied from F. Temme
+edgeTHR = 18 # copied from F. Temme
+
+# get dictionary of next neighbors
+nn = Coordinator().nn
+
+print 'Core threshold=', coreTHR
+print 'Edge threshold=', edgeTHR
+print '*'*70
+
+areas = []
+sizes = []
+
+for data,startcell,tt in run:
+    if tt==4:
+        data = despike(data)
+        data = smooth(data)
+        amplitude, time_of_max = extract(data)
+        
+        #plotA.name='amplitudes EvtID:' + str(run.event_id.value) + ' TT:' + str(tt)
+        #plotA(amplitude)
+        #plotT(time_of_max)
+        
+        # Here we start the cleaning ... just like that...
+        #print 'cleaning away all pixel <' , coreTHR
+        core_chid_candidates = np.where( amplitude > coreTHR)[0]
+        core_chids = [] # core chids, which survive cleaning step 1
+        survivors = [] # core & edge pixels
+        #print 'number of core candidates:', len(core_chid_candidates)
+        
+        #print 'throwing away all pixel w/o any neighbor' 
+        # get rid of single core pixels (candidates without any core neighbor)
+        for cand in core_chid_candidates:
+            neighbor_found = False
+            # loop over all neighbors of the candidate
+            for n in nn[cand]:
+                if n in core_chid_candidates:
+                    neighbor_found = True
+            if neighbor_found:
+                core_chids.append(cand)
+                
+        #print 'after deletion of single core pixels'
+        
+        
+        # add edge pixels around the border of the core region
+        #print 'resurrecting edge pixels ... i.e. all pixel >', edgeTHR
+        survivors = core_chids[:]
+        for coor in core_chids:
+            for n in nn[coor]:
+                # if neighbor is a core pixel, then do nothing
+                if n in core_chids:
+                    pass
+                elif amplitude[n] > edgeTHR:
+                    survivors.append(n)
+        survivors = np.array(survivors, dtype=int)
+        print '#survivors', len(survivors), 'evtID', run.event_id.value
+        
+        #plotCA(data=amplitude, mask=survivors)
+        
+        size = 0
+        for pixel in survivors:
+            size += amplitude[pixel]
+        
+        if len(survivors) > 0:
+            areas.append( len(survivors) )
+            sizes.append( size )
+        
+        
+        #plotArea(areas, 'areas of ' + str(run.event_id.value) + 'events')
+        #plotSize(sizes, 'sizes of ' + str(run.event_id.value) + 'events')
+        
+        if confirm_next_step:
+            user_input = raw_input("'q'-quit, 'r'-run, anything else goes one step")
+            number=None
+            try:
+                number=int(user_input)
+            except:
+                number=None
+            if user_input.find('q') != -1:
+                sys.exit(0)
+            elif user_input.find('r') != -1:
+                confirm_next_step = False
+            elif number!=None:
+                run += number
+                
+        total_event_number = run.event_id.value
+
+plt.ion()
+myfig = plt.figure()
+myn = myfig.number
+logsize = np.log10(np.array(sizes))
+areas = np.array(areas)
+
+plt.figure(myn)
+plt.title('area vs. log10(size) of '+ str(total_event_number) + 'events')
+plt.xlabel('log10(size/1mV)')
+plt.ylabel('area [#pixel]')
+plt.plot( logsize,areas, '.')
Index: fact/tools/pyscripts/sandbox/dneise/timecal.py
===================================================================
--- fact/tools/pyscripts/sandbox/dneise/timecal.py	(revision 13414)
+++ fact/tools/pyscripts/sandbox/dneise/timecal.py	(revision 13415)
@@ -11,5 +11,5 @@
 from pyfact     import RawData
 from drs_spikes import DRSSpikes
-from extractors import ZeroXing
+from extractor import ZeroXing
 class CalculateDrsTimeCalibrationConstants( object ):
     """ basic analysis class for the calculation of DRS time aperture jitter calibration constants
@@ -34,5 +34,5 @@
         self.fsampling = 2.         # sampling frequency
         self.freq = 250.            # testfrequency
-        self.P_nom = 1000./freq     # nominal Period due to testfrequency
+        self.P_nom = 1000./self.freq     # nominal Period due to testfrequency
         self.DAMPING = 0.02         # Damping applied to correction
         
@@ -48,5 +48,8 @@
         """        
         # loop over events
+        counter = 0
         for event in self.run:
+            print 'counter', counter
+            counter +=1
             # in the next() method, which is called during looping,
             # the data is automatically amplitude calibrated.
@@ -60,5 +63,5 @@
             start_cell = event['start_cells'][niners_ids,:]
             
-            data = despike( data )
+            data = self.despike( data )
             
             # shift data down by the mean           # possible in one line
@@ -111,5 +114,5 @@
             
             # first we make the sublists of the lists to be numpy.arrays 
-            for i in len(float_slice_of_all_crossings): #both list have equal length
+            for i in range(len(float_slice_of_all_crossings)): #both list have equal length
                 float_slice_of_all_crossings[i] = np.array(float_slice_of_all_crossings[i])
                 time_of_all_crossings[i] = np.array(time_of_all_crossings[i])
@@ -120,5 +123,5 @@
                 all_measured_periods.append(np.diff(chip_times))
             
-            for chid,chip_periods in enumerate(all_measured_periods):
+            for chip,chip_periods in enumerate(all_measured_periods):
                 
                 for i,period in enumerate(chip_periods):
@@ -144,6 +147,7 @@
                     total_negative_correction = rest * (interval * (pos_fraction/rest))
                     # so we can call the product following 'rest' the 'neg_fraction
-                    neg_fraction = -1 * interval/rest * important
-                    assert total_positive_correction - total_negative_correction == 0
+                    neg_fraction = -1 * interval/rest * pos_fraction
+                    #print '1. should be zero', total_positive_correction - total_negative_correction
+                    #assert total_positive_correction - total_negative_correction == 0
                     
                     correction = np.zeros(1024)
@@ -151,5 +155,6 @@
                     correction[:start+1]      = neg_fraction
                     correction[end+1:]        = neg_fraction
-                    assert correction.sum() == 0
+                    #print '2.   should be zero', correction.sum()
+                    #assert correction.sum() == 0
                     
                     # now we have to add these correction values to self.time_calib
