Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/README.txt
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/README.txt	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/README.txt	(revision 13368)
@@ -0,0 +1,28 @@
+# Timecalibration FACT
+#
+# Remo Dietlicher
+# Semesterarbeit ETH Zurich
+#
+
+
+Python macro
+============
+
+
+ftcal.py		program to calculate the time calibration constants vector of a given dataset.
+
+ftcalapp.py		program based on periods2.py and ftcal2.py. Calculates time calibration constants
+			vector and creates ROOT histograms:
+			1. calculated periods for each vector using the averaged dataset before and after
+				calibration.
+			2. mean deviations from nominal periods. Gives a hint of the convergence.
+			
+			3. calculated calibration constants.
+			
+periods2.py		creates the first histogram for ftcalapp.py.
+
+ftcal2.py		calculation of time calibration constants and average of data over all events.
+
+ftcalsim.py		simulation to test ftcal.py.
+
+
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcal.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcal.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcal.py	(revision 13368)
@@ -0,0 +1,238 @@
+#!/usr/bin/python
+#
+# Werner Lustermann
+# ETH Zurich
+#
+import pyfact
+from myhisto import *
+from hist import *
+import numpy as np
+import numpy.random as rnd
+from scipy import interpolate as ip
+from ROOT import *
+from time import time
+from optparse import OptionParser
+
+t_s = time()
+
+#dfname = '/data03/fact-construction/raw/2011/11/24/20111124_121.fits'
+#calfname = '/data03/fact-construction/raw/2011/11/24/20111124_111.drs.fits'
+
+dfname = '/data00/fact-construction/raw/2011/11/24/20111124_113.fits.gz'
+calfname = '/data00/fact-construction/raw/2011/11/24/20111124_111.drs.fits.gz'
+
+rd = pyfact.rawdata( dfname, calfname )
+
+fsampling = 2. 	# sampling frequency
+freq = 250.        	# testfrequency
+P_nom = 1000./freq 	# nominal Period due to testfrequency
+DAMPING = 0.02		# Damping applied to correction
+NChip = 1		# Number of Chips
+NEvents = 300
+
+
+parser = OptionParser()
+parser.add_option('-e', '--extended', action = 'store_true', dest='extend', default = False )
+(options, args) = parser.parse_args()
+
+save = "yes"
+
+if(options.extend):
+
+	NChip = int(raw_input("Wieviele Chips? ([1, ... , "+str(rd.NPIX/9)+"]): "))
+	NEventsp = int(raw_input("Wieviele Events? ([1, ... , "+str(rd.NEvents)+"]) "))
+	save = raw_input("Soll CellTime gespeichert werden? (yes/no) ")
+	
+	
+h = hist_list(tcalHistograms, NChip, "Chip")
+
+
+# Functions needed for calibration
+
+# find negative-going crossings of mean value
+def Crossing(Data, Mean, t_nom, Start, Chip):
+	TimeXing = np.zeros(rd.NROI)
+	MeanXing = np.zeros(rd.NROI)
+	NumXing = 0
+	CellTime = CellTimef(np.roll(t_nom, -Start))
+	
+	for i in range(rd.NROI-1):
+		if ((Data[i] > Mean) & (Data[i+1] < Mean)):
+			MeanXing[NumXing] = i
+			
+			FirstCell = CellTime[i]
+			SecondCell = CellTime[i+1]
+			
+			TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
+			NumXing += 1
+	
+	# calculate the frequency of the testwave
+	freqp = fsampling*NumXing*1000./1024
+	if(np.abs(freqp-freq) > 20):
+		print "This is not a timecalibration run!!"
+		raise SystemExit
+						
+	return MeanXing, TimeXing, NumXing
+		
+def CrossingOliver(Data, Mean, t_nom, Start, Chip):
+	TimeXing = np.zeros(lpipe)
+	MeanXing = np.zeros(lpipe)
+	NumXing = 0
+	CellTime = CellTimef(t_nom)
+	
+	
+	for i in range(lpipe-1):
+		if ((Data[i] > Mean) & (Data[i+1] < Mean)):
+			MeanXing[NumXing] = np.mod(i+Start, lpipe)
+			
+			FirstCell = CellTime[np.mod(i+Start,lpipe)]
+			SecondCell = CellTime[np.mod(i+1+Start, lpipe)]
+			
+			if(SecondCell < FirstCell): SecondCell =+ lpipe/fsampl
+			
+			TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
+			NumXing += 1
+			
+				
+	return MeanXing, TimeXing, NumXing
+
+# Determine time between crossings and apply correction
+def Corr(MeanXing, NumXing, TimeXing, t_nom, Start, Chip):
+
+	# loop over mean crossings
+	for i in range(int(NumXing-1)):
+		I1 = MeanXing[i]
+		I2 = MeanXing[i+1]
+		Period = TimeXing[i+1] - TimeXing[i]
+		
+		
+		if(np.abs(1-Period/P_nom) > 0.2):
+			#print "Skipping zero crossing due to too large deviation of period."
+			h.list[Chip].dict["loleftout"].Fill(MeanXing[i])
+			#h.list[Chip].dict["leftout"].Fill(np.mod((MeanXing[i]+Start), rd.NROI))
+			continue
+			
+		#h.list[Chip].dict["lomeanxing"].Fill(MeanXing[i])
+		h.list[Chip].dict["meanxing"].Fill(np.mod((MeanXing[i]+Start), rd.NROI))
+		
+		# calculate the frequency of the testwave
+		freqp = 1000./Period
+		h.list[Chip].dict["freq"].Fill(freqp)
+			
+		C = (P_nom-Period)*DAMPING
+		numnom = Period*fsampling
+		Correction = np.zeros(rd.NROI)
+		Correction[I1+1:I2+1] += C/numnom
+		Correction[:I1+1] -= (I2-I1)*C/(numnom*(rd.NROI-(I2-I1)))
+		Correction[I2+1:] -= (I2-I1)*C/(numnom*(rd.NROI-(I2-I1)))
+		
+		Correction = np.roll(Correction, Start)
+		t_nom += Correction
+	#h.list[Chip].dict["lomeanxing"].Fill(MeanXing[-1])
+	h.list[Chip].dict["meanxing"].Fill(np.mod((MeanXing[-1]+Start), rd.NROI))
+			
+	return t_nom
+	
+def CorrOliver(MeanXing, NumXing, TimeXing, t_nom, Start, Chip):
+	
+	CellTime = CellTimef(t_nom)
+	for i in range(int(NumXing-1)):
+		I1 = MeanXing[i]
+		I2 = MeanXing[i+1]
+		Period = TimeXing[i+1] - TimeXing[i]
+		if(Period<0): Period += rd.NROI/fsampl
+		
+		if(np.abs(1-Period/P_nom) > 0.2):
+			#print "Skipping zero crossing due to too large deviation of period."
+			continue
+			
+		Correction = (P_nom-Period)*DAMPING
+		
+		# Positive correction from MeanXing[i] to MeanXing[i+1]
+		# and negative to all others
+		Time = 0
+		for j in range(rd.NROI):
+			diff = CellTime[j+1] - CellTime[j]
+			if ((I2>I1 and j>I1 and j<=I2) or (I2<I1 and (j<=I2 or j>I1))):
+				diff += Correction/np.mod(I2-I1+rd.NROI, rd.NROI)
+			else:
+				diff -= Correction/(rd.NROI-np.mod(I2-I1+rd.NROI, rd.NROI))
+			CellTime[j] = Time
+			Time += diff
+		CellTime[rd.NROI] = Time
+	
+	print CellTime[-1]	
+	t_nom = t_nomf(CellTime)
+	
+	return t_nom
+	
+
+	
+# Summing up the nominal times for each cell to the total cell time.
+def CellTimef(t_nom):
+	CellTime = np.zeros(rd.NROI+1)
+	for i in range(rd.NROI):
+		CellTime[i+1] = CellTime[i] + t_nom[i]
+		
+	return CellTime
+	
+# getting t_nom from CellTime
+def t_nomf(CellTime):
+	t_nom = np.zeros(rd.NROI)
+	for i in range(rd.NROI):
+		t_nom[i] = CellTime[i+1]-CellTime[i]
+		
+	return t_nom
+	
+
+rd.next()
+NPIX = NChip*9
+NEvents = NEventsp
+
+# create array to put the calibrated times for each cell in
+t_nom = np.ones([NPIX, rd.NROI])/fsampling
+# loop over events
+for Event in range( NEvents ):
+	print "Event ", Event
+	Chip = 0
+
+	# loop over every 9th channel in Chip
+	for Pix in range(NPIX)[8::9]:
+		print "Pix ", Pix
+		
+		# calculate t_nom for each pixel
+		MeanXing, TimeXing, NumXing = Crossing(rd.acalData[Pix], np.average(rd.acalData[Pix]), t_nom[Pix], rd.startCells[Pix], Chip)
+		h.list[Chip].dict["numxing"].Fill(NumXing)
+		#h.list[Chip].dict["start"].Fill(rd.startCells[Pix])
+		t_nom[Pix] = Corr(MeanXing, NumXing, TimeXing, t_nom[Pix], rd.startCells[Pix], Chip)
+		
+		Chip += 1
+		
+	rd.next()
+
+t_nom = t_nom[8::9]
+
+if(save == "yes"):	
+	np.save(str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip), t_nom)
+	np.savetxt(str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip), t_nom)
+
+for Chip in range(NChip):
+	for i in range(rd.NROI):
+		h.list[Chip].dict["int_dev"].SetBinContent(i+1, np.sum(t_nom[Chip][:i])-i*1/fsampling)
+		h.list[Chip].dict["diff_dev"].SetBinContent(i+1, np.sum(t_nom[Chip][i] - 1/fsampling))
+	
+pyfact.SaveHistograms(h.list, str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip)+".root", "RECREATE")
+
+
+t_f = time()
+
+
+
+print "time ellapsed		= ",  (t_f - t_s)/60., " min."	
+print "Speichern		= ", save
+print "NChip			= ", NChip
+print "NEvents			= ", NEventsp
+print "Histo saved as		= ", str(dfname[-20:-8])+str(NEvents)+"x"+str(NChip)+".root"
+if(save == "yes"):
+	print "CellTime saved as	= ", str(dfname[-20:-8])+str(NEvents)+"x"+str(NChip)+".npy"
+
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcal2.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcal2.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcal2.py	(revision 13368)
@@ -0,0 +1,158 @@
+#!/usr/bin/python
+#
+# Remo Dietlicher
+# ETH Zurich
+# Semesterarbeit
+#
+import pyfact
+from myhisto import *
+from hist import *
+import numpy as np
+import numpy.random as rnd
+from scipy import interpolate as ip
+from ROOT import *
+from time import time
+from optparse import OptionParser
+
+def ftcal(NChip, NEventsp, h,
+	      dfname = '/data00/fact-construction/raw/2011/11/24/20111124_014.fits.gz', 
+		  calfname = '/data00/fact-construction/raw/2011/11/24/20111124_012.drs.fits.gz'):
+
+	rd = pyfact.rawdata( dfname, calfname )
+
+	fsampling = 1024./512.	# sampling frequency
+	freq = 250.        		# testfrequency
+	P_nom = 1000./freq 		# nominal Period due to testfrequency
+	DAMPING = 0.02			# Damping applied to correction
+	
+
+	# Functions needed for calibration
+	# find negative-going crossings of mean value
+	def Crossing(Data, Mean, t_nom, Start, Chip):
+		TimeXing = np.zeros(rd.NROI)
+		MeanXing = np.zeros(rd.NROI)
+		NumXing = 0
+		CellTime = CellTimef(np.roll(t_nom, -Start))
+
+		for i in range(rd.NROI-1):
+			if ((Data[i] > Mean) & (Data[i+1] < Mean)):
+				MeanXing[NumXing] = i
+
+				FirstCell = CellTime[i]
+				SecondCell = CellTime[i+1]
+
+				TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
+				NumXing += 1
+
+		# calculate the frequency of the testwave
+		freqp = fsampling*NumXing*1000./1024
+		if(np.abs(freqp-freq) > 20):
+			print "This is not a timecalibration run!!"
+			raise SystemExit
+
+		return MeanXing, TimeXing, NumXing
+
+	# Determine time between crossings and apply correction
+	def Corr(MeanXing, NumXing, TimeXing, t_nom, Start, Chip, Event):
+
+		Ctot = np.zeros(NumXing)
+		
+		# loop over mean crossings
+		for i in range(int(NumXing-1)):
+			I1 = MeanXing[i]
+			I2 = MeanXing[i+1]
+			Period = TimeXing[i+1] - TimeXing[i]
+
+
+			if(np.abs(1-Period/P_nom) > 0.2):
+				continue
+			
+
+			if(Event == NEvents-1):
+				h.list[Chip].dict["periods"].Fill(Period)
+			if(Event == 0):
+				h.list[Chip].dict["periods0"].Fill(Period)
+
+
+
+			# calculate the frequency of the testwave
+			freqp = 1000./Period
+
+			C = (P_nom-Period)*DAMPING
+			Ctot[i] = P_nom-Period
+			
+			
+			numnom = Period*fsampling
+			Correction = np.zeros(rd.NROI)
+			Correction[I1+1:I2+1] += C/numnom
+			Correction[:I1+1] -= (I2-I1)*C/(numnom*(rd.NROI-(I2-I1)))
+			Correction[I2+1:] -= (I2-I1)*C/(numnom*(rd.NROI-(I2-I1)))
+
+			Correction = np.roll(Correction, Start)
+			t_nom += Correction
+			
+			
+			if(Event > 200):
+				h.list[Chip].dict["perioddevproj"].Fill(np.average(Ctot))
+			h.list[Chip].dict["perioddev"].SetBinContent(Event+1, np.average(np.abs(Ctot)))	
+
+		return t_nom
+
+
+	# Summing up the nominal times for each cell to the total cell time.
+	def CellTimef(t_nom):
+		CellTime = np.zeros(rd.NROI+1)
+		for i in range(rd.NROI):
+			CellTime[i+1] = CellTime[i] + t_nom[i]
+
+		return CellTime
+
+	# getting t_nom from CellTime
+	def t_nomf(CellTime):
+		t_nom = np.zeros(rd.NROI)
+		for i in range(rd.NROI):
+			t_nom[i] = CellTime[i+1]-CellTime[i]
+
+		return t_nom
+
+
+	rd.next()
+	NPIX = NChip*9
+	NEvents = NEventsp
+
+	# create array to put the calibrated times for each cell in
+	t_nom = np.ones([NPIX, rd.NROI])/fsampling
+	DataTot = np.zeros([NEvents, NChip, rd.NROI])
+	
+	# loop over events
+	for Event in range( NEvents ):
+		print "Event ", Event
+		Chip = 0
+
+		# loop over every 9th channel in Chip
+		for Pix in range(NPIX)[8::9]:
+			print "Pix ", Pix
+
+			# calculate t_nom for each pixel
+			MeanXing, TimeXing, NumXing = Crossing(rd.acalData[Pix], np.average(rd.acalData[Pix]), t_nom[Pix], rd.startCells[Pix], Chip)
+			t_nom[Pix] = Corr(MeanXing, NumXing, TimeXing, t_nom[Pix], rd.startCells[Pix], Chip, Event)
+			
+			DataTot[Event][Chip] = np.roll(rd.acalData[Pix], rd.startCells[Pix])
+			
+			Chip += 1
+
+		rd.next()
+	
+	CellTime = np.zeros([NPIX, rd.NROI+1])
+	
+	DataMean = np.average(DataTot, 0)
+	
+	for Pix in range(NPIX):
+		CellTime[Pix] = CellTimef(t_nom[Pix])
+
+	t_nom = t_nom[8::9]
+	CellTime = CellTime[8::9]
+
+
+	return CellTime, DataMean
+
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcalapp.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcalapp.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcalapp.py	(revision 13368)
@@ -0,0 +1,156 @@
+# Program to calculate integral deviation for several Datasets
+#
+# Remo Dietlicher
+# ETH Zürich
+# Semesterarbeit
+#
+#
+
+import pyfact
+from myhisto import *
+from hist import *
+import numpy as np
+import numpy.random as rnd
+from scipy import interpolate as ip
+from ROOT import *
+from time import time
+from optparse import OptionParser
+from ftcal2 import ftcal
+from periods2 import periods2
+
+
+NROI = 1024				# Region of interest
+NChip = 2				# Number of Chips
+NEvents = 400			# Number of Events
+fsampling = 1024./512. 	# sampling frequency
+
+# Open default Datafiles
+
+Datafiles = []
+
+Datafiles.append(["/data00/fact-construction/raw/2011/11/24/20111124_014.fits.gz",
+				  "/data00/fact-construction/raw/2011/11/24/20111124_012.drs.fits.gz"])
+				  
+Datafiles.append(["/data00/fact-construction/raw/2011/11/24/20111124_034.fits.gz",
+				  "/data00/fact-construction/raw/2011/11/24/20111124_032.drs.fits.gz"])			  
+				  
+Datafiles.append(["/data00/fact-construction/raw/2011/11/24/20111124_053.fits.gz",
+				  "/data00/fact-construction/raw/2011/11/24/20111124_051.drs.fits.gz"])
+				  
+Datafiles.append(["/data00/fact-construction/raw/2011/11/24/20111124_073.fits.gz",
+				  "/data00/fact-construction/raw/2011/11/24/20111124_071.drs.fits.gz"])
+				  
+Datafiles.append(["/data00/fact-construction/raw/2011/11/24/20111124_093.fits.gz",
+				  "/data00/fact-construction/raw/2011/11/24/20111124_091.drs.fits.gz"])
+				  
+Datafiles.append(["/data00/fact-construction/raw/2011/11/24/20111124_113.fits.gz",
+				  "/data00/fact-construction/raw/2011/11/24/20111124_111.drs.fits.gz"])
+				
+				  
+count = 6
+
+h = range(count)
+
+for dat in range(count):
+	h[dat] = hist_list(ftcalappHistograms, NChip, "Chip")
+
+# Parser for extended Version
+
+parser = OptionParser()
+parser.add_option('-e', '--extended', action = 'store_true', dest='extend', default = False )
+(options, args) = parser.parse_args()
+
+path = "/data00/fact-construction/raw/"
+
+
+# Open any Datafile
+
+if(options.extend):
+
+	Datafiles = []
+	
+	year = raw_input("Daten aus welchem Jahr? ")
+	month = raw_input("Daten aus welchem Monat? ")
+	day = raw_input("Daten von welchem Tag? ")
+	path = "/data00/fact-construction/raw/"+str(year)+"/"+str(month)+"/"+str(day)+"/"
+	
+	count = int(raw_input("Wieviele runs? "))
+	h = range(count)
+	
+	
+	for dat in range(count):
+		run = year+month+day+"_"+raw_input("Welcher Datenrun? ")+".fits.gz"
+		drs = year+month+day+"_"+raw_input("Welcher Kalibrationsrun? ")+".drs.fits.gz"
+		Datafiles.append([str(path)+str(run), str(path)+str(drs)])
+		h[dat] = hist_list(ftcalappHistograms, NChip, "Chip")
+		
+
+	NChip = int(raw_input("Wieviele Chips? ([1, ... ,160]): "))
+	NEvents = int(raw_input("Wieviele Events? ([1, ... , 1000]) "))
+
+
+save = raw_input("Sollen die Kalibrationskonstanten Gespeichert weden? (yes/no) ")		
+	
+	
+	
+		
+CellTime = np.zeros([count, NChip, NROI+1])
+DataMean = np.zeros([count, NChip, NROI])
+		
+for dat in range(count):
+	print "Datafile: ", Datafiles[dat][0]
+	CellTime[dat], DataMean[dat] = ftcal(NChip, NEvents, h[dat], Datafiles[dat][0], Datafiles[dat][1])
+
+for dat in range(count):
+	for Chip in range(NChip):
+		for i in range(NROI):
+			h[dat].list[Chip].dict["int_dev"].SetBinContent(i+1, CellTime[dat][Chip][i]-i/fsampling)
+			
+		periods2(DataMean[dat][Chip], CellTime[dat][Chip], h[dat].list[Chip])
+			
+			
+	pyfact.SaveHistograms(h[dat].list, str(Datafiles[dat][0][-20:-8])+"_"+str(NEvents)+"x"+str(NChip)+".root", "RECREATE")
+	
+	
+	print "Histogram saved as: ", str(Datafiles[dat][0][-20:-8])+"_"+str(NEvents)+"x"+str(NChip)+".root"
+			
+	
+
+if(save == "yes"):
+	for dat in range(count):
+		np.save("FtcalKonst/"+str(Datafiles[dat][0][-20:-8])+"_"+str(NEvents)+"x"+str(NChip), CellTime[dat])
+		print "calibration constants saved as: ", str(Datafiles[dat][0][-20:-8])+"_"+str(NEvents)+"x"+str(NChip)+".npy"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcalsim.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcalsim.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/ftcalsim.py	(revision 13368)
@@ -0,0 +1,382 @@
+# simulation of the real data for timecalibration
+#
+# Remo Dietlicher
+# ETH Zürich / Semesterarbeit
+#
+#
+
+import pyfact
+from myhisto import *
+from hist import *
+import numpy as np
+import numpy.random as rnd
+from ROOT import *
+from time import time
+from optparse import OptionParser
+
+NROI  = 1024 		# length of the DRS pipeline
+NEvents  = 200	 	# Number of evaluated Event
+fsampling = 1024./512.1 # sampling frequency
+freq = 250.        	# testfrequency
+P_nom = 1000./freq 	# nominal Period due to testfrequency
+DAMPING = 0.02		# Damping applied to correction
+NChip = 1		# Number of Chips
+
+t_s = time()
+
+parser = OptionParser()
+parser.add_option('-e', '--extended', action = 'store_true', dest='extend', default = False )
+(options, args) = parser.parse_args()
+
+variante = "dat"
+alg = "Remo"
+save = "no"
+
+if(options.extend):
+
+	variante = raw_input("Welche Daten? (sim/dat): ")
+	alg = raw_input("Welcher Algorithmus? (Remo/Oliver): ")
+	NChip = int(raw_input("Wieviele Chips? ([1, ... ,160]): "))
+	NEvents = int(raw_input("Wieviele Events? ([1, ... , 1000]) "))
+	save = raw_input("Soll CellTime gespeichert werden? (yes/no) ")
+	tag = raw_input("Soll ein spezieller tag an das File angehängt werden? ")
+	
+NPix = 9*NChip
+
+h = hist_list(tcalsimHistograms, NChip, "Chip")
+MeanXingHist = np.zeros(NROI)
+
+def Datainit(variante, NEvents):
+
+	if(variante == "sim"):
+		y_off = 3.4
+	
+		# Start is the first bin being written on.
+		Startp = rnd.random(NEvents)*(NROI - 2) 	# project [0, 1] onto int([0, lpipe-1])
+		Start = range(NEvents)
+		for i, val in enumerate(Startp):
+			Start[i] = int(round(val))		# getting integer values needed in roll later
+			
+		ID = np.zeros(NROI)
+		x = np.linspace(0., float(NROI), NROI+1)
+		ID = np.sin(x*2*np.pi/NROI+np.pi)*5
+		
+		dt_true = np.zeros(NROI)
+		for i in range(NROI):
+			dt_true[i] = ID[i+1]-ID[i]
+		
+		versch = np.sum(dt_true)/1024
+		dt_true = dt_true - versch
+		print np.sum(dt_true)
+	
+		#create time-array, time in nanoseconds
+		t_nom = np.ones(NROI)/fsampling
+		t_true = t_nom + dt_true
+	
+		# simulation of the real pipeline
+		rCellTime = np.zeros([NEvents, NROI])
+		for Event in range(NEvents):
+			for i in range(NROI-1):
+				rCellTime[Event][i+1] = rCellTime[Event][i]+t_true[i]
+			rCellTime[Event] = np.roll(rCellTime[Event],-Start[Event])
+			rCellTime[-Start[Event]:] += NROI/fsampling
+			
+		print "t_true tot = ", np.sum(t_true)
+		
+		# calculation of the nominal pipeline
+		def CellTimef(t_nom):
+			CellTime = np.zeros(NROI+1)
+			for i in range(NROI):
+				CellTime[i+1] = CellTime[i] + t_nom[i]
+		
+			return CellTime	
+			
+		CellTime = CellTimef(t_nom)
+		
+		# Simulate Data
+		Data = np.zeros([NEvents, NROI])
+		for Event in range(NEvents):
+			Data[Event] = np.sin(rCellTime[Event]*2.*np.pi/P_nom)+y_off
+	
+	if(variante == "dat"):
+		t_true = 0
+		Data = np.load("d1000.npy")
+		Start = np.load("start_cells_1.npy")
+		
+		
+	return Data, t_true, Start
+	
+Data, t_true, Start = Datainit(variante, NEvents)
+
+
+# Functions needed for calibration
+# calculation of the nominal pipeline
+def CellTimef(t_nom):
+	CellTime = np.zeros(NROI+1)
+	for i in range(NROI):
+		CellTime[i+1] = CellTime[i] + t_nom[i]
+	
+	return CellTime	
+	
+# getting t_nom from CellTime
+def t_nomf(CellTime):
+	t_nom = np.zeros(NROI)
+	for i in range(NROI):
+		t_nom[i] = CellTime[i+1]-CellTime[i]
+		
+	return t_nom	
+	
+		
+
+# find negative-going crossings of mean value
+def Crossing(Data, Mean, t_nom, Start, Chip):
+	TimeXing = np.zeros(NROI)
+	MeanXing = np.zeros(NROI)
+	NumXing = 0
+	CellTime = CellTimef(np.roll(t_nom, -Start))
+	
+	for i in range(NROI-1):
+		if ((Data[i] > Mean) & (Data[i+1] < Mean)):
+			MeanXing[NumXing] = i
+			h.list[Chip].dict["lomeanxing"].Fill(np.mod(i+Start, NROI))
+			
+			FirstCell = CellTime[i]
+			SecondCell = CellTime[i+1]
+			
+			TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
+			NumXing += 1
+			
+	# calculate the frequency of the testwave
+	freqp = fsampling*NumXing*1000./1024
+	h.list[Chip].dict["meanfreq"].Fill(freqp)
+	if(np.abs(freqp-freq) > 20):
+		print "This is not a timing calibration run!!"
+		raise SystemExit
+						
+	return MeanXing, TimeXing, NumXing
+	
+	
+	
+# find positive-going crossings of mean value
+def CrossingPos(Data, Mean, t_nom, Start, Chip):
+	TimeXing = np.zeros(NROI)
+	MeanXing = np.zeros(NROI)
+	NumXing = 0
+	CellTime = CellTimef(np.roll(t_nom, -Start))
+	
+	for i in range(NROI-1):
+		if ((Data[i] < Mean) & (Data[i+1] > Mean)):
+			MeanXing[NumXing] = i
+			h.list[Chip].dict["lomeanxing"].Fill(np.mod(i+Start, NROI))
+			
+			FirstCell = CellTime[i]
+			SecondCell = CellTime[i+1]
+			
+			TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
+			NumXing += 1
+			
+	# calculate the frequency of the testwave
+	freqp = fsampling*NumXing*1000./1024
+	h.list[Chip].dict["meanfreq"].Fill(freqp)
+	if(np.abs(freqp-freq) > 20):
+		print "This is not a timing calibration run!!"
+		raise SystemExit
+						
+	return MeanXing, TimeXing, NumXing
+	
+
+	
+def CrossingOliver(Data, Mean, t_nom, Start, Chip):
+
+	TimeXing = np.zeros(NROI)
+	MeanXing = np.zeros(NROI)
+	NumXing = 0
+	CellTime = CellTimef(t_nom)
+	
+	
+	for i in range(NROI-1):
+		if ((Data[i] > Mean) & (Data[i+1] < Mean)):
+			MeanXing[NumXing] = np.mod(i+Start, NROI)
+			
+			FirstCell = CellTime[np.mod(i+Start,NROI)]
+			SecondCell = CellTime[np.mod(i+1+Start, NROI)]
+			
+			if(SecondCell < FirstCell): SecondCell =+ NROI/fsampl
+			
+			TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
+			NumXing += 1
+			
+	# calculate the frequency of the testwave
+	freqp = fsampling*NumXing*1000./1024
+	h.list[Chip].dict["meanfreq"].Fill(freqp)
+	if(np.abs(freqp-freq) > 20):
+		print "This is not a timing calibration run!!"
+		raise SystemExit		
+				
+	return MeanXing, TimeXing, NumXing
+
+# Determine time between crossings and apply correction
+def Corr(MeanXing, NumXing, TimeXing, t_nom, Start, Chip, Event):
+
+	Ctot = np.zeros(NumXing)
+
+	# loop over mean crossings
+	for i in range(int(NumXing-1)):
+		I1 = MeanXing[i]
+		I2 = MeanXing[i+1]
+		Period = TimeXing[i+1] - TimeXing[i]
+		if(Event > 300):
+			h.list[Chip].dict["period"].Fill(Period)
+		
+		
+		if(np.abs(1-Period/P_nom) > 0.2):
+			#print "Skipping zero crossing due to too large deviation of period."
+			h.list[Chip].dict["loleftout"].Fill(MeanXing[i])
+			h.list[Chip].dict["leftout"].Fill(np.mod(MeanXing[i]+Start, NROI))
+			continue
+			
+		h.list[Chip].dict["lomeanxing"].Fill(MeanXing[i])
+		h.list[Chip].dict["meanxing"].Fill(np.mod(MeanXing[i]+Start, NROI))
+		MeanXingHist[np.mod(MeanXing[i]+Start, NROI)] += 1
+		
+		# calculate the frequency of the testwave
+		freqp = 1000./Period
+		h.list[Chip].dict["freq"].Fill(freqp)
+		
+		C = (P_nom-Period)*DAMPING
+		Ctot[i] = P_nom-Period
+
+		numnom = Period*fsampling
+		Correction = np.zeros(NROI)
+		Correction[I1+1:I2+1] += C/numnom
+		Correction[:I1+1] -= (I2-I1)*C/(numnom*(NROI-(I2-I1)))
+		Correction[I2+1:] -= (I2-I1)*C/(numnom*(NROI-(I2-I1)))
+		"""
+		Correction = np.zeros(NROI)
+		Correction[I1+1:I2+1] += C/(I2-I1)
+		Correction[:I1+1] -= C/(NROI-(I2-I1))
+		Correction[I2+1:] -= C/(NROI-(I2-I1))
+
+		numnom = Period*fsampling
+		Correction = np.zeros(NROI)
+		Correction[I1+1:I2+1] += C/numnom
+		"""
+			
+		Correction = np.roll(Correction, Start)
+		t_nom += Correction
+	#t_nom = t_nom/np.sum(t_nom)*1024/fsampling
+	
+	if(Event > 200):
+		h.list[Chip].dict["perioddevproj"].Fill(np.average(Ctot))
+	h.list[Chip].dict["perioddev"].SetBinContent(Event+1, np.abs(np.average(Ctot)))	
+	h.list[Chip].dict["lomeanxing"].Fill(MeanXing[NumXing])
+	h.list[Chip].dict["meanxing"].Fill(np.mod(MeanXing[NumXing]+Start, NROI))
+	MeanXingHist[np.mod(MeanXing[NumXing]+Start, NROI)] += 1
+		
+			
+	return t_nom
+
+def CorrOliver(MeanXing, NumXing, TimeXing, t_nom, Start, Chip):
+	
+	CellTime = CellTimef(t_nom)
+	for i in range(int(NumXing-1)):
+		I1 = MeanXing[i]
+		I2 = MeanXing[i+1]
+		Period = TimeXing[i+1] - TimeXing[i]
+		if(Period<0): Period += NROI/fsampling
+		h.list[Chip].dict["period"].Fill(Period)
+		
+		if(np.abs(1-Period/P_nom) > 0.2):
+			print "Skipping zero crossing due to too large deviation of period."
+			h.list[Chip].dict["loleftout"].Fill(MeanXing[i])
+			h.list[Chip].dict["leftout"].Fill(np.mod(MeanXing[i]+Start, NROI))
+			continue
+			
+		# calculate the frequency of the testwave
+		freqp = 1000./Period
+		h.list[Chip].dict["freq"].Fill(freqp)
+		
+			
+		h.list[Chip].dict["lomeanxing"].Fill(MeanXing[i])
+		h.list[Chip].dict["meanxing"].Fill(np.mod(MeanXing[i]+Start, NROI))
+		MeanXingHist[np.mod(MeanXing[i]+Start, NROI)] += 1
+			
+		Correction = (P_nom-Period)*DAMPING
+		
+		# Positive correction from MeanXing[i] to MeanXing[i+1]
+		# and negative to all others
+		Time = 0
+		for j in range(NROI):
+			diff = CellTime[j+1] - CellTime[j]
+			if ((I2>I1 and j>I1 and j<=I2) or (I2<I1 and (j<=I2 or j>I1))):
+				diff += Correction/np.mod(I2-I1+NROI, NROI)
+			else:
+				diff -= Correction/(NROI-np.mod(I2-I1+NROI, NROI))
+			CellTime[j] = Time
+			Time += diff
+		CellTime[NROI] = Time
+
+	t_nom = t_nomf(CellTime)
+	
+	h.list[Chip].dict["lomeanxing"].Fill(MeanXing[NumXing-1])	
+	h.list[Chip].dict["meanxing"].Fill(np.mod(MeanXing[NumXing-1]+Start, NROI))
+	
+	return t_nom
+	
+	
+# create array to put the calibrated times for each cell in
+t_nom = np.ones(NROI)/fsampling
+# loop over events
+for Event in range( NEvents ):
+	print "Event ", Event
+	Chip = 0
+	
+	# loop over every 9th channel in Chip
+	for Pix in range(NPix)[8::9]:
+		print "Pix ", Pix
+		
+		# calculate t_nom for each pixel
+		if(alg == "Remo"):
+			MeanXing, TimeXing, NumXing = Crossing(Data[Event], np.average(Data[Event]), t_nom, int(Start[Event]), Chip)
+			h.list[Chip].dict["numxing"].Fill(NumXing)
+			t_nom = Corr(MeanXing, NumXing, TimeXing, t_nom, int(Start[Event]), Chip, Event)
+			h.list[Chip].dict["conv"].SetBinContent(Event+1, np.average(np.abs(t_nom-t_true)))
+			
+
+		if(alg == "Oliver"):
+			MeanXing, TimeXing, NumXing = CrossingOliver(Data[Event], np.average(Data[Event]), t_nom, int(Start[Event]), Chip)
+			h.list[Chip].dict["numxing"].Fill(NumXing)
+			t_nom = CorrOliver(MeanXing, NumXing, TimeXing, t_nom, int(Start[Event]), Chip)
+			h.list[Chip].dict["conv"].SetBinContent(Event+1, np.average(np.abs(t_nom-t_true)))
+		
+		Chip += 1
+
+CellTime = CellTimef(t_nom)
+
+print "t_tot = ", np.sum(t_nom)	
+
+if(save == "yes"):
+	np.save(str(alg)+"_"+str(variante)+"_"+str(NEvents)+"x"+str(NChip)+str(tag), CellTime)
+	np.savetxt(str(alg)+"_"+str(variante)+"_"+str(NEvents)+"x"+str(NChip)+str(tag), CellTime[None], fmt="%7.4f")
+
+
+for Chip in range(NChip):
+	for i in range(NROI):
+		h.list[Chip].dict["int_dev"].SetBinContent(i+1, CellTime[i]-i/fsampling)
+		h.list[Chip].dict["diff_dev"].SetBinContent(i+1, t_nom[i]-1/fsampling)
+		h.list[Chip].dict["data"].SetBinContent(i+1, Data[-1][i])
+	
+pyfact.SaveHistograms(h.list, "tcal_"+str(alg)+"_"+str(variante)+str(tag)+".root", "RECREATE")
+
+
+t_f = time()
+print "vergangene Zeit	= ",  (t_f - t_s)/60., " min."
+	
+print "Variante		= ", variante
+print "Algorithmus		= ", alg
+print "Speichern		= ", save
+print "NChip			= ", NChip
+print "NEvents			= ", NEvents
+print "Histo saved as		= ", "tcal_"+str(alg)+"_"+str(variante)+str(tag)+".root"
+if(save == "yes"):
+	print "CellTime saved as	= ", str(alg)+"_"+str(variante)+"_"+str(NEvents)+"x"+str(NChip)+str(tag)+".npy"
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/hist.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/hist.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/hist.py	(revision 13368)
@@ -0,0 +1,166 @@
+#!/usr/bin/python
+#
+# Werner Lustermann
+# ETH Zurich
+#
+
+from ROOT import TH1F, TObjArray
+
+import numpy as np
+
+
class histogramList( object ):
    """Named collection of ROOT histograms.

    Every histogram is kept three times: in an ordered python list, in a
    dict keyed by its tag, and in a TObjArray for ROOT-side bulk writing.
    Subclasses implement book() to create their histograms.
    """
    def __init__( self, name ):
        """Store the collection name, create the empty containers, book."""
        self.name  = name
        self.list  = []
        self.dict  = {}
        self.hList = TObjArray()
        self.book()

    def __del__(self):
        # Explicitly delete the ROOT objects we own.
        for histo in self.list:
            histo.Delete()

    def add( self, tag, histo ):
        """Register *histo* under *tag* in all three containers."""
        self.list.append( histo )
        self.dict[tag] = histo
        self.hList.Add( histo )

    def h1d( self, name, title, Nbin, first, last, xtitle, ytitle ):
        """Book a 1-d float histogram with axis titles and register it."""
        histo = TH1F( name, title, Nbin, first, last )
        histo.GetXaxis().SetTitle( xtitle )
        histo.GetYaxis().SetTitle( ytitle )
        self.add( name, histo )

    def book( self ):
        """Hook for subclasses: create histograms here via h1d()."""
        pass
+    
+
class hist( object ):
    """
    Histogram filled via numpy.histogram.

    Multiple fill() calls accumulate into a single bin-content array;
    mkroot() exports the accumulated contents into a ROOT TH1F.
    """
    def __init__( self, nbins, first, last, name = 'name', title = 'title',
                  xtitle = 'x', ytitle = 'y' ):
        self.nbins  = nbins
        self.first  = first
        self.last   = last
        self.name   = name
        self.title  = title
        self.xtitle = xtitle
        self.ytitle = ytitle
        self.y  = np.zeros( self.nbins ) # accumulated bin contents, all ZERO
        self.rh = 0                      # ROOT histogram, created lazily by mkroot()

    def fill( self, x ):
        """Histogram the data in *x* and add the counts to the accumulator."""
        # BUG FIX: the range must be passed as a (first, last) tuple, exactly
        # as hist_array.fill() already does; passing first and last as two
        # separate positional arguments is invalid.  The long-removed
        # 'new=True' keyword was dropped as well.
        h, b = np.histogram( x, self.nbins, ( self.first, self.last ) )
        self.y += h

    def zero( self ):
        """Reset all accumulated bin contents to zero."""
        self.y[:] = 0.

    def mkroot( self ):
        """Create (once) the ROOT histogram and copy the accumulator into it."""
        if self.rh == 0:
            self.rh = TH1F( self.name, self.title, self.nbins,
                            self.first, self.last )
            # BUG FIX: use the instance attributes; the bare names
            # xtitle/ytitle are undefined here and raised NameError.
            self.rh.GetXaxis().SetTitle( self.xtitle )
            self.rh.GetYaxis().SetTitle( self.ytitle )

        for i in range( self.nbins ):
            self.rh.SetBinContent( i+1, self.y[i] )
+        
+        
class hist_array( object ):
    """
    Array of *nhist* identically-binned histograms filled via numpy.

    Bin contents accumulate in a (nhist, nbins) array; optionally a
    parallel set of ROOT TH1F objects is booked and can be updated from
    the accumulator with SetBinContent().
    """
    def __init__( self, nhist, nbins, first, last, name = 'name',
                  title = 'title', xtitle = 'x', ytitle = 'y', root = True):
        """
        - store the histogram info in the object
        - initialize the (nhist, nbins) data array to zero
        - book ROOT histograms unless root=False
        """
        self.nhist  = nhist
        self.nbins  = nbins
        self.first  = first
        self.last   = last
        self.name   = name
        self.title  = title
        self.xtitle = xtitle
        self.ytitle = ytitle

        self.y = np.zeros( ( self.nhist, self.nbins ) )

        if root == True:
            self.bookRootHistos()
        else:
            self.list = 0   # sentinel: no ROOT histograms booked

    def __del__(self):
        # BUG FIX: when constructed with root=False, self.list is the int 0
        # and iterating it raised TypeError during garbage collection.
        if self.list != 0:
            for h in self.list:
                h.Delete()

    def fill( self, hist, x ):
        """Histogram *x* and add the counts to histogram number *hist*."""
        # BUG FIX: dropped the long-removed 'new=True' keyword.
        h, b = np.histogram( x, self.nbins, ( self.first, self.last ) )
        self.y[ hist ] += h

    def fillall( self, x ):
        """Fill every histogram: row *i* of the 2-d array *x* goes into histogram *i*."""
        for hist in range( self.nhist ):
            self.fill( hist, x[hist, :] )

    def zero( self ):
        """Reset all accumulated bin contents to zero."""
        self.y.flat[:] = 0.

    def bookRootHistos( self ):
        """Create one TH1F per histogram and collect them in list/hList."""
        self.list  = [ x for x in range( self.nhist ) ]
        self.hList = TObjArray()

        for hist in range( self.nhist ):
            hname  = self.name + ' ' + str( hist )
            htitle = self.title + ' ' + str( hist )
            self.list[hist] = TH1F( hname, htitle,
                                    self.nbins, self.first, self.last )
            self.list[hist].GetXaxis().SetTitle( self.xtitle )
            self.list[hist].GetYaxis().SetTitle( self.ytitle )
            self.hList.Add( self.list[hist] )

    def SetBinContent( self ):
        """Copy the accumulator into the booked ROOT histograms."""
        if self.list == 0:
            # parenthesized form works under both python 2 and 3
            print( 'no root histograms booked' )
        else:
            for hist in range( self.nhist ):
                for i in range( self.nbins ):
                    self.list[hist].SetBinContent( i+1, self.y[hist, i] )
+		    
+		    
class hist_list( object ):
    """Hold *nhist* histogram collections produced by the factory *booking*.

    Collection number i is created as booking("<hname>_<i+1>") and the
    results are kept, in order, in self.list.
    """
    def __init__( self, booking, nhist, hname ):
        base = str( hname )
        self.list = [ booking( base + "_" + str( idx + 1 ) )
                      for idx in range( nhist ) ]
+
+    
+        
+        
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/jitter.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/jitter.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/jitter.py	(revision 13368)
@@ -0,0 +1,114 @@
+# Programm zur Jitter-Bestimmung
+#
+#
+# Remo Dietlicher
+# ETH Zürich
+#
+#
+
+
+import pyfact
+from myhisto import *
+from hist import *
+import numpy as np
+import numpy.random as rnd
+from scipy import interpolate as ip
+from ROOT import *
+from time import time
+from optparse import OptionParser
+
# Book the jitter summary histograms (classes defined in myhisto.py).
jitterSummary = jitterHistograms( "jitter" )


# Amplitude-calibrated test-pulse traces, one row per event (channel 0).
Data0 = np.loadtxt("20120106T162310_ch0.txt")

NEvents, NROI = np.shape(Data0)

# DRS readout start cells per event for channel 0.
Start0 = np.loadtxt("20120106T162310_start_ch0.txt")

# Same for channel 8.
Data8 = np.loadtxt("20120106T162310_ch8.txt")

Start8 = np.loadtxt("20120106T162310_start_ch8.txt")

# Copy one example event (index 10) into the data histograms for inspection.
for i in range(NROI):
	jitterSummary.dict["data0"].SetBinContent(i+1, Data0[10][i])
	jitterSummary.dict["data8"].SetBinContent(i+1, Data8[10][i])

# Threshold for the leading-edge crossing search (in mV, presumably -- TODO confirm).
Thresh = 250

#rCellTime = np.load("Remo_dat_1000x15123.npy")
# Previously computed DRS time calibration constants (cell times in ns).
rCellTime = np.load("CellTimeOliver.npy")
+
def Crossing(Data, start=None):
	"""
	Return the calibrated time of the LAST upward threshold crossing found
	in the event trace *Data*, or the sentinel string "gugus" if none.

	Data  - one event trace of length NROI
	start - start-cell array of the channel *Data* belongs to; defaults to
	        Start0 to preserve the original behaviour.
	        NOTE(review): the original always rotated with Start0, even for
	        channel-8 data -- callers should pass Start8 for channel 8.
	"""
	if start is None:
		start = Start0

	TimeXing = "gugus"   # sentinel: no crossing found
	# Rotate the calibrated cell times to this event's readout start cell.
	CellTime = np.roll(rCellTime, -int(start[Event][0]))

	for i in range(NROI-1):
		# Upward crossing; the check 300 slices later rejects narrow spikes.
		if((Data[i] < Thresh) & (Data[i+1] > Thresh) & (Data[np.mod(i+300, NROI)] > Thresh)):

			FirstCell = CellTime[i]
			SecondCell = CellTime[i+1]

			# Linear interpolation of the crossing time between the two cells.
			TimeXing = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Thresh/(Data[i]))

	return TimeXing
+
# Per-event time difference between the two channels.
Diff = np.zeros(NEvents)

# Number of events skipped because a crossing was missing.
count = 0

for Event in range(NEvents):
	print Event

	Time0 = Crossing(Data0[Event])
	Time8 = Crossing(Data8[Event])

	# Skip events where either channel has no threshold crossing
	# ("gugus" is the no-crossing sentinel returned by Crossing()).
	if((Time0 == "gugus") or (Time8 == "gugus")):
		count += 1
		continue

	# The spread of this difference is the inter-channel jitter.
	Diff[Event] = Time0 - Time8
	jitterSummary.dict["diff"].Fill(Diff[Event])
+
+pyfact.SaveHistograms([jitterSummary], "Jitter_Histo_Oliver.root", "RECREATE")
+
+print "Number of skipped events: ", count
+print "Histo saved as	= ", "Jitter_Histo.root"
+
+
+
+	
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+	
+	
+	
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/myhisto.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/myhisto.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/myhisto.py	(revision 13368)
@@ -0,0 +1,225 @@
+#!/usr/bin/python
+#
+# Werner Lustermann
+# ETH Zurich
+#
+from ROOT import TH1F
+import pyfact
+
+from hist import histogramList
+
class bslHistograms( histogramList ):
    """Summary histograms of the per-pixel baseline mean and RMS."""

    def book( self ):
        # (name, title, nbins, first, last, xtitle, ytitle)
        for spec in (
            ( "histo_mean", "Value of maximal probability",
              400, -99.5, 100.5, 'max value (mV)', 'Entries / 0.5 mV' ),
            ( "hplt_mean", "Value of maximal probability",
              1440, -0.5, 1439.5, 'max value in mV', 'Entries / 0.5 mV' ),
            ( "histo_rms", "RMS in mV",
              2000, -99.5, 100.5, 'RMS (mV)', 'Entries / 0.5 mV' ),
            ( 'hplt_rms', 'Value of maximal probability',
              1440, -0.5, 1439.5, 'pixel', 'RMS in mV' ),
        ):
            self.h1d( *spec )
+
def H1d( name, title, Nbin, first, last, xtitle, ytitle ):
    """Create and return a TH1F with both axis titles already set."""
    histo = TH1F( name, title, Nbin, first, last )
    histo.GetXaxis().SetTitle( xtitle )
    histo.GetYaxis().SetTitle( ytitle )
    return histo
+
+
def BookBslSummaryHistos( list ):
    """Book the baseline summary histograms and register them in *list*.

    NOTE(review): the parameter shadows the builtin 'list'; the name is
    kept unchanged for interface compatibility with existing callers.
    """
    # (tag, (name, title, nbins, first, last, xtitle, ytitle))
    for tag, args in (
        ( 'hbsl_mean', ( "histo_mean", "Value of maximal probability",
                         400, -99.5, 100.5,
                         'max value (mV)', 'Entries / 0.5 mV' ) ),
        ( 'hplt_mean', ( "hplt_mean", "Value of maximal probability",
                         1440, -0.5, 1439.5,
                         'max value in mV', 'Entries / 0.5 mV' ) ),
        ( 'hbsl_rms', ( "histo_rms", "RMS in mV",
                        2000, -99.5, 100.5,
                        'RMS (mV)', 'Entries / 0.5 mV' ) ),
        ( 'hplt_rms', ( 'hplt_rms', 'Value of maximal probability',
                        1440, -0.5, 1439.5, 'pixel', 'RMS in mV' ) ),
    ):
        list.add( tag, H1d( *args ) )
+				   
class tcalHistograms( histogramList ):
    """Summary histograms for the DRS time calibration of real data (ftcal.py)."""

    def book( self ):
        self.h1d("diff_dev", "Differential Deviation", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "Deviation per bin in ns")
        self.h1d("int_dev", "Integral Deviation", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "Total Deviation in ns")
        self.h1d("start", "distribution of startcells", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "counts")
        self.h1d("numxing", "number of crossings", 200, 0, 200,
                 "number of crossings", "counts")
        self.h1d("lomeanxing", "Distribution of logic Meancrossing", 1024, 0, 1024,
                 "Cell of Meancrossing", "counts")
        self.h1d("meanxing", "Distribution of physical Meancrossing", 1024, 0, 1024,
                 "Cell of Meancrossing", "counts")
        self.h1d("corr", "Distribution of positive Corrections", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "counts")
        self.h1d("loleftout", "Distribution of left out Crossings", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "counts")
        # TYPO FIX in title: "left our" -> "left out"
        self.h1d("leftout", "Distribution of left out Crossings", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "counts")
        # TYPO FIX in title: "lenght" -> "length"
        self.h1d("period", "Distribution of Period length", 10, 0, 10,
                 "number of cells within Period", "counts")
        # TYPO FIX in axis title: "Frequenz" -> "Frequency"
        self.h1d("freq", "calculated Frequency of the Testwave", 100, 200, 300,
                 "Frequency", "counts")
+			  
+			  
class tcalsimHistograms( histogramList ):
    """Summary histograms for the time-calibration simulation (ftcalsim.py)."""

    def book( self ):
        self.h1d("diff_dev", "Differential Deviation", 1024, 0., 1024.,
                 "Bin in DRS4 pipeline", "Deviation per bin in ns")
        self.h1d("int_dev", "Integral Deviation", 1024, 0., 1024.,
                 "Bin in DRS4 pipeline", "Total Deviation in ns")
        self.h1d("start", "distribution of startcells", 1024, 0., 1024.,
                 "Bin in DRS4 pipeline", "counts")
        self.h1d("numxing", "number of crossings", 200, 0, 200,
                 "number of crossings", "counts")
        self.h1d("lomeanxing", "Distribution of logic Meancrossing", 1024, 0, 1024,
                 "Cell of Meancrossing", "counts")
        self.h1d("meanxing", "Distribution of physical Meancrossing", 1024, 0, 1024,
                 "Cell of Meancrossing", "counts")
        self.h1d("loleftout", "Distribution of left out Crossings", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "counts")
        # TYPO FIX in title: "left our" -> "left out"
        self.h1d("leftout", "Distribution of left out Crossings", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "counts")
        # TYPO FIX: "lenght"/"leght" -> "length"
        self.h1d("period", "Distribution of Period length", 1000, 2.5, 5.5,
                 "length of Period", "counts")
        self.h1d("data", "simulated Data", 1024, 0, 1024,
                 "Bin in DRS4 pipeline", "Data")
        # TYPO FIX in axis title: "Frequenz" -> "Frequency"
        self.h1d("freq", "calculated Frequency of the Testwave", 100, 200, 300,
                 "Frequency", "counts")
        self.h1d("meanfreq", "mean calculated Frequency of the Testwave", 100, 200, 300,
                 "Frequency", "counts")
        self.h1d("conv", "Convergence of the Deviation", 200, 0, 200,
                 "Event", "Deviation from the true value")
        self.h1d("perioddev", "Mean deviation from nominal Period for each Iteration", 1000, 0, 1000,
                 "number of iterations", "mean deviation")
        self.h1d("perioddevproj", "Distribution of mean nominal Periods for each Iteration", 1000, -0.2, 0.2,
                 "Deviation of Period", "Counts")
+			  
class tcalanaHistograms( histogramList ):
    """Histograms comparing calculated and true deviations of the calibration."""

    def book( self ):
        # (name, title, nbins, first, last, xtitle, ytitle)
        for spec in (
            ("int_dev", "Integral Deviation", 1024, 0, 1024,
             "Bin in DRS4 pipeline", "counts"),
            ("diff_dev", "Differential Deviation", 1024, 0, 1024,
             "Bin in DRS4 pipeline", "counts"),
            ("diff_dev_true", "True Differential Deviation", 1024, 0, 1024,
             "Bin in DRS4 pipeline", "counts"),
            ("int_dev_true", "True Integral Deviation", 1024, 0, 1024,
             "Bin in DRS4 pipeline", "counts"),
            # title "simulierte Daten" is German ("simulated data");
            # kept verbatim because it is a runtime string
            ("data", "simulierte Daten", 1024, 0, 1024,
             "Bin in DRS4 pipeline", "Data"),
            ("start", "distribution of startcells", 1024, 0., 1024.,
             "Bin in DRS4 pipeline", "counts"),
        ):
            self.h1d(*spec)
+			  
+			  
class periodHistograms( histogramList ):
    """Histograms of measured test-wave periods and the difference
    between the two period algorithms (Oliver vs. Remo)."""

    def book ( self ):
        # TYPO FIX in title: "Lenght" -> "Length"
        self.h1d("periods", "Length of Periods", 2000, 0, 10,
                 "Period in ns", "counts")
        self.h1d("diff", "PeriodOliver - PeriodRemo", 100, -0.3, 0.3,
                 "difference in ns", "counts")
+			   
class amplitudesHistograms( histogramList ):
    """Histogram of per-pixel amplitude standard deviations."""

    def book ( self ):
        # axis title "Standardabweichung in mV" kept verbatim (runtime string)
        self.h1d( "std", "Standard deviation",
                  20, 0, 20,
                  "Standardabweichung in mV", "count" )
+			   
+
class jitterHistograms( histogramList ):
    """Histograms for the two-channel jitter measurement (jitter.py)."""

    def book ( self ):
        # (name, title, nbins, first, last, xtitle, ytitle)
        for spec in (
            ("diff", "Jitter Calculation", 500, 200, 230,
             "difference between pulses in ns", "bin content"),
            ("data0", "Data", 1024, 0, 1024,
             "Bin in DRS4 pipeline", "Data"),
            ("data8", "Data", 1024, 0, 1024,
             "Bin in DRS4 pipeline", "Data"),
        ):
            self.h1d(*spec)
+			  
class ftcalappHistograms( histogramList ):
    """Histograms produced by ftcalapp.py: period distributions before and
    after calibration, convergence monitoring and the integral deviation."""

    def book ( self ):
        # (name, title, nbins, first, last, xtitle, ytitle)
        for spec in (
            ("avperiods", "distribution of periods for average data", 1000, 2, 6,
             "period in ns", "bin content"),
            ("avperiods0", "distribution of periods at beginning for average data", 400, 2, 6,
             "period in ns", "bin content"),
            ("periods", "distribution of periods", 400, 2, 6,
             "period in ns", "bin content"),
            ("periods0", "distribution of periods at beginning", 400, 2, 6,
             "period in ns", "bin content"),
            ("perioddev", "Mean deviation from nominal Period for each Iteration", 1000, 0, 1000,
             "number of iterations", "mean deviation"),
            ("perioddevproj", "Distribution of mean nominal Periods for each Iteration", 1000, -0.2, 0.2,
             "Deviation of Period", "Counts"),
            ("int_dev", "integral deviation", 1024, 0., 1024.,
             "bin in DRS4 pipeline", "total deviation in ns"),
        ):
            self.h1d(*spec)
+	
+
+              
+
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/periods2.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/periods2.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/periods2.py	(revision 13368)
@@ -0,0 +1,67 @@
+# Programm zur Bestimmung der Genauigkeit des konvergierten Algorithmus
+#
+# Remo Dietlicher
+#
+#
+
+from optparse import OptionParser
+import pyfact
+from myhisto import *
+from hist import *
+import numpy as np
+import numpy.random as rnd
+from ROOT import *
+from time import time
+
def periods2(Data, CellTime, h):
	"""
	Fill the "avperiods"/"avperiods0" histograms of *h* with the periods of
	the test wave in *Data*, measured once with the calibrated cell times
	*CellTime* and once with the nominal (uncalibrated) cell times.

	Data     - averaged event trace, at least NROI samples
	CellTime - calibrated cell times (NROI+1 values, in ns)
	h        - histogram collection with an 'avperiods' and an
	           'avperiods0' entry (objects providing Fill())
	"""
	NROI = 1024 				# length of the DRS pipeline
	fsampling = 2. 				# sampling frequency (nominal period = 1/fsampling = 4 ns at 250 MHz test wave)

	# nominal (uncalibrated) cell times
	nomCellTime = np.linspace(0., 1024., 1025)/fsampling

	def _crossing_times(mean, cellTime):
		"""Return the interpolated times of all downward mean-crossings."""
		times = []
		for i in range(NROI-1):
			if Data[i] > mean and Data[i+1] < mean:
				first = cellTime[i]
				second = cellTime[i+1]
				# linear interpolation between the two cells
				# (assumes Data[i] != 0 -- same as the original)
				times.append(first + (second-first)/(1.-Data[i+1]/(Data[i]))*(1.-mean/(Data[i])))
		return times

	def _fill_periods(cellTime, name):
		"""Fill h.dict[name] with the differences of consecutive crossings."""
		times = _crossing_times(np.average(Data), cellTime)
		# BUG FIX: the original always iterated a fixed 126 periods,
		# reading zero-initialized crossing times (and filling bogus
		# periods) whenever the number of crossings differed from 127;
		# iterate only over the crossings actually found.
		for first, second in zip(times, times[1:]):
			h.dict[str(name)].Fill(second - first)

	_fill_periods(CellTime, "avperiods")
	_fill_periods(nomCellTime, "avperiods0")

	return
+
+
+
+
Index: /fact/tools/pyscripts/sandbox/dneise/rdietlic/pyfact.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/rdietlic/pyfact.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/rdietlic/pyfact.py	(revision 13368)
@@ -0,0 +1,224 @@
+#!/usr/bin/python2.6
+#
+# Werner Lustermann
+# ETH Zurich
+#
+from ctypes import *
+
+# get the ROOT stuff + my shared libs
+from ROOT import gSystem
+gSystem.Load('../troot/fitslib.so')
+from ROOT import *
+
+import numpy as np
+
+
+class rawdata( object ):
+    """
+    raw data access and calibration
+    """
+    def __init__( self, dfname,  calfname ):
+        """
+        open data file and calibration data file
+        get basic information about the data inf dfname
+        allocate buffers for data access
+
+        dfname   - fits or fits.gz file containing the data including the path
+        calfname - fits or fits.gz file containing DRS calibration data
+        """
+        self.dfname   = dfname
+        self.calfname = calfname
+        
+        # access data file
+        try:
+            df = fits( self.dfname )
+        except IOError:
+            print 'problem accessing data file: ', dfname
+            raise # stop ! no data
+        self.df = df
+        
+        # get basic information about the data file
+        self.NROI    = df.GetUInt( 'NROI' ) # region of interest (length of DRS pipeline read out)
+        self.NPIX    = df.GetUInt( 'NPIX' ) # number of pixels (should be 1440)
+        self.NEvents = df.GetNumRows()      # find number of events
+        # allocate the data memories
+        self.evNum = c_ulong()
+        self.Data  = np.zeros( self.NPIX * self.NROI, np.int16 ) 
+        self.startCells = np.zeros( self.NPIX, np.int16 )
+        # set the pointers to the data++
+        df.SetPtrAddress( 'EventNum', self.evNum )
+        df.SetPtrAddress( 'StartCellData', self.startCells ) # DRS readout start cell
+        df.SetPtrAddress( 'Data', self.Data ) # this is what you would expect
+        # df.GetNextRow() # access the first event
+        
+        # access calibration file
+        try:
+            calf = fits( self.calfname )
+        except IOError:
+            print 'problem accessing calibration file: ', calfname
+            raise
+        self.calf = calf
+        #
+        BaselineMean      = calf.GetN('BaselineMean')
+        GainMean          = calf.GetN('GainMean')
+        TriggerOffsetMean = calf.GetN('TriggerOffsetMean')
+
+        self.blm = np.zeros( BaselineMean, np.float32 )
+        self.gm  = np.zeros( GainMean, np.float32 )
+        self.tom = np.zeros( TriggerOffsetMean, np.float32 )
+
+        self.Nblm = BaselineMean / self.NPIX
+        self.Ngm  = GainMean / self.NPIX
+        self.Ntom  = TriggerOffsetMean / self.NPIX
+
+        calf.SetPtrAddress( 'BaselineMean', self.blm )
+        calf.SetPtrAddress( 'GainMean', self.gm )
+        calf.SetPtrAddress( 'TriggerOffsetMean', self.tom )
+        calf.GetRow(0)
+
+        self.v_bsl = np.zeros( self.NPIX ) # array with baseline values (all ZERO)
+
+    def next( self ):
+        """
+        load the next event from disk and calibrate it
+        """
+        self.df.GetNextRow()
+        self.calibrate_drsAmplitude()
+
+        
+    def calibrate_drsAmplitude( self ):
+        """
+        perform amplitude calibration for the event 
+        """
+        tomV = 2000./4096.
+        acalData = self.Data * tomV # convert into mV
+
+        # reshape arrays: row = pixel, col = drs_slice
+        acalData = np.reshape( acalData, (self.NPIX, self.NROI) )
+        blm = np.reshape( self.blm, (self.NPIX, self.NROI) )
+        tom = np.reshape( self.tom, (self.NPIX, self.NROI) )
+        gm  = np.reshape( self.gm,  (self.NPIX, self.NROI) )
+        
+        # print 'acal Data ', acalData.shape
+        # print 'blm shape ', blm.shape
+        # print 'gm shape  ', gm.shape
+        
+        for pixel in range( self.NPIX ):
+            # rotate the pixel baseline mean to the Data startCell
+            blm_pixel = np.roll( blm[pixel,:], -self.startCells[pixel] )
+            acalData[pixel,:] -= blm_pixel[0:self.NROI]
+            acalData[pixel,:] -= tom[pixel, 0:self.NROI]
+            acalData[pixel,:] /= gm[pixel,  0:self.NROI]
+            
+	self.acalData = acalData * 1907.35
+    
+        # print 'acalData ', self.acalData[0:2,0:20]
+
+    def ReadBaseline( self, file, bsl_hist = 'bsl_sum/hplt_mean' ):
+        """
+        open ROOT file with baseline histogram and read baseline values
+        file       name of the root file
+        bsl_hist   path to the histogram containing the basline values
+        """
+        try:
+            f = TFile( file )
+        except:
+            print 'Baseline data file could not be read: ', file
+            return
+        
+        h = f.Get( bsl_hist )
+
+        for i in range( self.NPIX ):
+            self.v_bsl[i] = h.GetBinContent( i+1 )
+
+        f.Close()
+
+        
+    def CorrectBaseline( self ):
+        """
+        apply baseline correction
+        """
+        for pixel in range( self.NPIX ):
+            self.acalData[pixel,:] -= self.v_bsl[pixel]
+            
+        
+    def info( self ):
+        """
+        print information
+        """
+        print 'data file:  ', dfname
+        print 'calib file: ', calfname
+        print '\ncalibration file'
+        print 'N BaselineMean: ', self.Nblm
+        print 'N GainMean: ', self.Ngm
+        print 'N TriggeroffsetMean: ', self.Ntom
+        
+
class histogramList( object ):
    """Named collection of histograms kept in a list, a tag-keyed dict and
    a ROOT TObjArray (for bulk writing)."""

    def __init__( self, name ):
        """Store the collection name and create the empty containers."""
        self.name  = name
        self.list  = []
        self.dict  = {}
        self.hList = TObjArray()

    def add( self, tag, histo ):
        """Register *histo* under *tag* in all three containers."""
        self.list.append( histo )
        self.dict[tag] = histo
        self.hList.Add( histo )
+
+
class pixelHisto1d ( object ):
    """One 1-d histogram per pixel, all with identical binning and titles."""

    def __init__( self, name, title, Nbin, first, last, xtitle, ytitle, NPIX ):
        """
        book one dimensional histograms for each pixel
        """
        self.name = name
        self.list = []
        self.hList = TObjArray()

        for pixel in range( NPIX ):
            histo = TH1F( name + ' ' + str( pixel ),
                          title + ' ' + str( pixel ),
                          Nbin, first, last )
            histo.GetXaxis().SetTitle( xtitle )
            histo.GetYaxis().SetTitle( ytitle )
            self.list.append( histo )
            self.hList.Add( histo )
+
+
def SaveHistograms( histogramLists, fname = 'histo.root', opt = 'RECREATE' ):
    """
    Saves all histograms in all given histogram lists to a root file
    Each histogram list is saved to a separate directory named after the
    list's .name attribute.

    histogramLists - iterable of objects with .name and .hList (TObjArray)
    fname          - output ROOT file name
    opt            - TFile open option
    """
    rf = TFile( fname, opt)

    # loop variable renamed from 'list' to avoid shadowing the builtin
    for hlist in histogramLists:
        rf.mkdir( hlist.name )
        rf.cd( hlist.name )
        hlist.hList.Write()

    rf.Close()
+
# simple test method
if __name__ == '__main__':
    """
    create an instance
    """
    # NOTE(review): hard-coded test paths; point these at a local
    # raw/calibration file pair before running.
    dfname = '/data03/fact-construction/raw/2011/11/24/20111124_121.fits'
    calfname = '/data03/fact-construction/raw/2011/11/24/20111124_111.drs.fits'
    rd = rawdata( dfname, calfname )
    rd.info()
    # read and amplitude-calibrate the first event as a smoke test
    rd.next()
+    
+# for i in range(10):
+#    df.GetNextRow() 
+
+#    print 'evNum: ', evNum.value
+#    print 'startCells[0:9]: ', startCells[0:9]
+#    print 'evData[0:9]: ', evData[0:9]
Index: /fact/tools/pyscripts/sandbox/dneise/timecal.py
===================================================================
--- /fact/tools/pyscripts/sandbox/dneise/timecal.py	(revision 13368)
+++ /fact/tools/pyscripts/sandbox/dneise/timecal.py	(revision 13368)
@@ -0,0 +1,336 @@
+#!/usr/bin/python -tti
+#
+# calculation of DRS Time Calibration constants
+# initial implementation by Remo Dietlicher and Werner Lustermann (ETH Zurich)
+# based on C++ classes by Oliver Grimm (ETH Zurich)
+#
+# this python implementation was coded by
+# D. Neise (TU Dortmund) April 2012
+#
+import pyfact
+from myhisto import *
+from hist import *
+import numpy as np
+import numpy.random as rnd
+from scipy import interpolate as ip
+from ROOT import *
+from time import time
+from optparse import OptionParser
+
+
+from pyfact     import RawData
+from drs_spikes import DRSSpikes
+from extractors import ZeroXing
class CalculateDrsTimeCalibrationConstants( object ):
    """ basic analysis class for the calculation of DRS time aperture jitter calibration constants
    """
    
    def __init__(self, dpath, cpath):
        """
        *dpath*: data file path, file should contain a periodic signal in each 9th channel
        *cpath*: std DRS amplitude calibration path
        """
        #dfname = '/data00/fact-construction/raw/2011/11/24/20111124_113.fits.gz'
        #calfname = '/data00/fact-construction/raw/2011/11/24/20111124_111.drs.fits.gz'
        self.data_path = dpath
        self.calib_path = cpath
        self.run = RawData(dpath, cpath, return_dict = True)
        self.despike = DRSSpikes()
        self.zX = ZeroXing()
        self.fsampling = 2.         # sampling frequency
        self.freq = 250.            # testfrequency
        # NOTE(review): 'freq' is unqualified -- this raises NameError;
        # presumably it should read 1000./self.freq
        self.P_nom = 1000./freq     # nominal Period due to testfrequency
        self.DAMPING = 0.02         # Damping applied to correction
        self.NChip = 1              # Number of Chips
        self.NEvents = 300          # Number of Events to Process
        
        # NOTE(review): 'run' and 'fsampling' are also unqualified (NameError);
        # presumably self.run.npix / self.run.nroi / self.fsampling
        self.t_nom = np.ones([run.npix, run.nroi])/fsampling

    def __call__(self):
        """ the class-call method performs the actual calculation based on the input data
            returns: tuple of length 160 containing tuples of length 1024 containing the length of each cell in ns
            the class contains self.result after the call.
        """        
        # loop over events
        for event in self.run:
            # NOTE(review): 'despike' and 'Calculate_Crossing' are attributes/
            # methods of this class -- both calls lack the 'self.' prefix and
            # raise NameError; the per-event results are also never stored,
            # so the self.result promised above is never created.
            data = despike( event['acal_data'] )
            start_cells = event['start_cells']
            MeanXing, TimeXing, NumXing = Calculate_Crossing(data, event['start_cells'])


    def Calculate_Crossing(self, data, start_cells):
        # Find falling crossings of the trace mean level (cf. the module-level
        # Crossing() further down, from which this body appears to be copied).
        # NOTE(review): the body still uses the script-version names --
        # CellTimef, t_nom, Start, rd, Data, Mean, fsampling and freq are all
        # undefined in this scope; the method cannot run as written.
        TimeXing = np.zeros(self.run.nroi)
        MeanXing = np.zeros(self.run.nroi)
        NumXing = 0
        CellTime = CellTimef(np.roll(t_nom, -Start))
        
        for i in range(rd.NROI-1):
            if (Data[i] > Mean) & (Data[i+1] < Mean):
                MeanXing[NumXing] = i
                
                FirstCell = CellTime[i]
                SecondCell = CellTime[i+1]
                
                # linear interpolation between the two bracketing samples
                TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/Data[i])
                NumXing += 1
        
        # calculate the frequency of the testwave
        freqp = fsampling*NumXing*1000./1024
        if(np.abs(freqp-freq) > 20):
            print "This is not a timecalibration run!!"
            raise SystemExit
                            
        return MeanXing, TimeXing, NumXing

        # NOTE(review): everything below is unreachable -- it follows the
        # return statement above. It looks like a stale paste of the script's
        # main loop (see the module level of this file) and should be deleted.
        for Event in range( NEvents ):
            print "Event ", Event
            Chip = 0

            # loop over every 9th channel in Chip
            for Pix in range(NPIX)[8::9]:
                print "Pix ", Pix
                
                # calculate t_nom for each pixel
                MeanXing, TimeXing, NumXing = Crossing(rd.acalData[Pix], np.average(rd.acalData[Pix]), t_nom[Pix], rd.startCells[Pix], Chip)
                h.list[Chip].dict["numxing"].Fill(NumXing)
                #h.list[Chip].dict["start"].Fill(rd.startCells[Pix])
                t_nom[Pix] = Corr(MeanXing, NumXing, TimeXing, t_nom[Pix], rd.startCells[Pix], Chip)
                
                Chip += 1
                
            rd.next()

        t_nom = t_nom[8::9]

        if(save == "yes"):	
            np.save(str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip), t_nom)
            np.savetxt(str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip), t_nom)

        for Chip in range(NChip):
            for i in range(rd.NROI):
                h.list[Chip].dict["int_dev"].SetBinContent(i+1, np.sum(t_nom[Chip][:i])-i*1/fsampling)
                h.list[Chip].dict["diff_dev"].SetBinContent(i+1, np.sum(t_nom[Chip][i] - 1/fsampling))
            
        pyfact.SaveHistograms(h.list, str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip)+".root", "RECREATE")


        t_f = time()



        print "time ellapsed		= ",  (t_f - t_s)/60., " min."	
        print "Speichern		= ", save
        print "NChip			= ", NChip
        print "NEvents			= ", NEventsp
        print "Histo saved as		= ", str(dfname[-20:-8])+str(NEvents)+"x"+str(NChip)+".root"
        if(save == "yes"):
            print "CellTime saved as	= ", str(dfname[-20:-8])+str(NEvents)+"x"+str(NChip)+".npy"
+
+
+
parser = OptionParser()
parser.add_option('-e', '--extended', action = 'store_true', dest='extend', default = False )
(options, args) = parser.parse_args()

save = "yes"
# Defaults for the non-interactive case. The original only assigned these
# inside the '-e' branch, so running without -e crashed with a NameError on
# 'NChip' in the hist_list call below. Values mirror the defaults of
# CalculateDrsTimeCalibrationConstants (1 chip, 300 events).
NChip = 1
NEventsp = 300

if(options.extend):
    # interactive mode: ask (prompts in German) how many chips / events to
    # process and whether the resulting CellTime array should be saved.
    # NOTE(review): 'rd' is not defined anywhere in this file -- these
    # prompts raise NameError; confirm where rd is meant to come from.
    NChip = int(raw_input("Wieviele Chips? ([1, ... , "+str(rd.NPIX/9)+"]): "))
    NEventsp = int(raw_input("Wieviele Events? ([1, ... , "+str(rd.NEvents)+"]) "))
    save = raw_input("Soll CellTime gespeichert werden? (yes/no) ")


# one histogram set per chip
h = hist_list(tcalHistograms, NChip, "Chip")
+
+
+# Functions needed for calibration
+
+# find negative-going crossings of mean value
+def Crossing(Data, Mean, t_nom, Start, Chip):
+	TimeXing = np.zeros(rd.NROI)
+	MeanXing = np.zeros(rd.NROI)
+	NumXing = 0
+	CellTime = CellTimef(np.roll(t_nom, -Start))
+	
+	for i in range(rd.NROI-1):
+		if ((Data[i] > Mean) & (Data[i+1] < Mean)):
+			MeanXing[NumXing] = i
+			
+			FirstCell = CellTime[i]
+			SecondCell = CellTime[i+1]
+			
+			TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
+			NumXing += 1
+	
+	# calculate the frequency of the testwave
+	freqp = fsampling*NumXing*1000./1024
+	if(np.abs(freqp-freq) > 20):
+		print "This is not a timecalibration run!!"
+		raise SystemExit
+						
+	return MeanXing, TimeXing, NumXing
+		
def CrossingOliver(Data, Mean, t_nom, Start, Chip):
    """Oliver Grimm's variant of the mean-crossing search (physical cell order).

    NOTE(review): this function is never called in this file, and 'lpipe'
    is not defined anywhere here (presumably the pipeline length, 1024) --
    confirm before use.
    """
    TimeXing = np.zeros(lpipe)
    MeanXing = np.zeros(lpipe)
    NumXing = 0
    CellTime = CellTimef(t_nom)

    for i in range(lpipe-1):
        if ((Data[i] > Mean) & (Data[i+1] < Mean)):
            # store the crossing position in physical (unrotated) cell numbers
            MeanXing[NumXing] = np.mod(i+Start, lpipe)

            FirstCell = CellTime[np.mod(i+Start,lpipe)]
            SecondCell = CellTime[np.mod(i+1+Start, lpipe)]

            # wrap-around of the readout window: the original read
            # 'SecondCell =+ lpipe/fsampl', which *assigned* the value
            # instead of adding it, and 'fsampl' was an undefined name --
            # both fixed here ('+=' and the file's 'fsampling')
            if(SecondCell < FirstCell): SecondCell += lpipe/fsampling

            # linear interpolation between the two bracketing samples
            TimeXing[NumXing] = FirstCell+(SecondCell-FirstCell)/(1.-Data[i+1]/(Data[i]))*(1.-Mean/(Data[i]))
            NumXing += 1

    return MeanXing, TimeXing, NumXing
+
# Determine time between crossings and apply correction
def Corr(MeanXing, NumXing, TimeXing, t_nom, Start, Chip):
    """Apply one damped correction step to the nominal cell widths *t_nom*.

    MeanXing/TimeXing/NumXing -- output of Crossing() for this trace
    t_nom -- nominal cell widths (modified in place and returned)
    Start -- start cell of the readout window
    Chip  -- chip index, selects the histogram set h.list[Chip]
    """
    # loop over pairs of consecutive mean crossings
    for i in range(int(NumXing-1)):
        # MeanXing is a float array -- cast to int so the slice bounds
        # below are valid indices (float slice indices are an error in numpy)
        I1 = int(MeanXing[i])
        I2 = int(MeanXing[i+1])
        Period = TimeXing[i+1] - TimeXing[i]

        # skip crossings whose period deviates more than 20% from nominal
        if(np.abs(1-Period/P_nom) > 0.2):
            #print "Skipping zero crossing due to too large deviation of period."
            h.list[Chip].dict["loleftout"].Fill(MeanXing[i])
            #h.list[Chip].dict["leftout"].Fill(np.mod((MeanXing[i]+Start), rd.NROI))
            continue

        #h.list[Chip].dict["lomeanxing"].Fill(MeanXing[i])
        h.list[Chip].dict["meanxing"].Fill(np.mod((MeanXing[i]+Start), rd.NROI))

        # calculate the frequency of the testwave
        freqp = 1000./Period
        h.list[Chip].dict["freq"].Fill(freqp)

        # damped correction: stretch the cells between the two crossings and
        # shrink all others, keeping the total window length constant
        C = (P_nom-Period)*DAMPING
        numnom = Period*fsampling
        Correction = np.zeros(rd.NROI)
        Correction[I1+1:I2+1] += C/numnom
        Correction[:I1+1] -= (I2-I1)*C/(numnom*(rd.NROI-(I2-I1)))
        Correction[I2+1:] -= (I2-I1)*C/(numnom*(rd.NROI-(I2-I1)))

        # rotate back from readout-window order to physical cell order
        Correction = np.roll(Correction, Start)
        t_nom += Correction
    #h.list[Chip].dict["lomeanxing"].Fill(MeanXing[-1])
    h.list[Chip].dict["meanxing"].Fill(np.mod((MeanXing[-1]+Start), rd.NROI))

    return t_nom
+	
+def CorrOliver(MeanXing, NumXing, TimeXing, t_nom, Start, Chip):
+	
+	CellTime = CellTimef(t_nom)
+	for i in range(int(NumXing-1)):
+		I1 = MeanXing[i]
+		I2 = MeanXing[i+1]
+		Period = TimeXing[i+1] - TimeXing[i]
+		if(Period<0): Period += rd.NROI/fsampl
+		
+		if(np.abs(1-Period/P_nom) > 0.2):
+			#print "Skipping zero crossing due to too large deviation of period."
+			continue
+			
+		Correction = (P_nom-Period)*DAMPING
+		
+		# Positive correction from MeanXing[i] to MeanXing[i+1]
+		# and negative to all others
+		Time = 0
+		for j in range(rd.NROI):
+			diff = CellTime[j+1] - CellTime[j]
+			if ((I2>I1 and j>I1 and j<=I2) or (I2<I1 and (j<=I2 or j>I1))):
+				diff += Correction/np.mod(I2-I1+rd.NROI, rd.NROI)
+			else:
+				diff -= Correction/(rd.NROI-np.mod(I2-I1+rd.NROI, rd.NROI))
+			CellTime[j] = Time
+			Time += diff
+		CellTime[rd.NROI] = Time
+	
+	print CellTime[-1]	
+	t_nom = t_nomf(CellTime)
+	
+	return t_nom
+	
+
+	
# Summing up the nominal times for each cell to the total cell time.
def CellTimef(t_nom):
    """Integrate per-cell widths into absolute cell times.

    t_nom -- sequence of cell widths (ns); any length is accepted
             (the original hard-wired the global rd.NROI, which always
             equals len(t_nom) at the call sites)

    Returns an array one element longer than t_nom with CellTime[0] == 0
    and CellTime[k] == sum(t_nom[:k]).
    """
    CellTime = np.zeros(len(t_nom) + 1)
    # cumulative sum replaces the explicit python loop
    CellTime[1:] = np.cumsum(t_nom)
    return CellTime
+	
# getting t_nom from CellTime
def t_nomf(CellTime):
    """Invert CellTimef: recover per-cell widths from absolute cell times.

    CellTime -- sequence of absolute cell times, length N+1
                (the original hard-wired N = rd.NROI, which always equals
                len(CellTime)-1 at the call sites)

    Returns the N successive differences CellTime[i+1] - CellTime[i].
    """
    # np.diff replaces the explicit loop and drops the rd.NROI global
    return np.diff(np.asarray(CellTime, dtype=float))
+	
+
# NOTE(review): this module-level script appears copied from ftcal.py --
# 'rd', 'dfname', 'fsampling', 't_s' and 'save' are never defined in this
# file, so it cannot run as-is; confirm against ftcal.py.
rd.next()
NPIX = NChip*9
NEvents = NEventsp

# create array to put the calibrated times for each cell in
t_nom = np.ones([NPIX, rd.NROI])/fsampling
# loop over events: each event refines t_nom by one damped correction step
for Event in range( NEvents ):
	print "Event ", Event
	Chip = 0

	# loop over every 9th channel in Chip
	# (pixels 8, 17, ... -- one test channel per chip)
	for Pix in range(NPIX)[8::9]:
		print "Pix ", Pix
		
		# calculate t_nom for each pixel
		MeanXing, TimeXing, NumXing = Crossing(rd.acalData[Pix], np.average(rd.acalData[Pix]), t_nom[Pix], rd.startCells[Pix], Chip)
		h.list[Chip].dict["numxing"].Fill(NumXing)
		#h.list[Chip].dict["start"].Fill(rd.startCells[Pix])
		t_nom[Pix] = Corr(MeanXing, NumXing, TimeXing, t_nom[Pix], rd.startCells[Pix], Chip)
		
		Chip += 1
		
	rd.next()

# keep only the rows that were actually calibrated (the test channels)
t_nom = t_nom[8::9]

# optionally persist the result, both as .npy and as plain text
if(save == "yes"):	
	np.save(str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip), t_nom)
	np.savetxt(str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip), t_nom)

# fill convergence histograms: integrated and per-cell deviation from the
# nominal cell width 1/fsampling
for Chip in range(NChip):
	for i in range(rd.NROI):
		h.list[Chip].dict["int_dev"].SetBinContent(i+1, np.sum(t_nom[Chip][:i])-i*1/fsampling)
		h.list[Chip].dict["diff_dev"].SetBinContent(i+1, np.sum(t_nom[Chip][i] - 1/fsampling))
	
pyfact.SaveHistograms(h.list, str(dfname[-20:-8])+"_"+str(NEvents)+"x"+str(NChip)+".root", "RECREATE")


t_f = time()


# summary printout (t_s is expected to be set at import time)
print "time ellapsed		= ",  (t_f - t_s)/60., " min."	
print "Speichern		= ", save
print "NChip			= ", NChip
print "NEvents			= ", NEventsp
print "Histo saved as		= ", str(dfname[-20:-8])+str(NEvents)+"x"+str(NChip)+".root"
if(save == "yes"):
	print "CellTime saved as	= ", str(dfname[-20:-8])+str(NEvents)+"x"+str(NChip)+".npy"
+
