Print

Print


Author: [log in to unmask]
Date: Wed Sep 21 12:28:15 2016
New Revision: 4487

Log:
Added a time-dependent ECal gains driver, plus more GUI debug output for the ECal pulse fitting, and some user-analysis code as well.

Added:
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains2016.java
    java/trunk/users/src/main/java/org/hps/users/spaul/EcalPeaks.java
Modified:
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPulseFitter.java
    java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPulseFitter.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPulseFitter.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/EcalPulseFitter.java	Wed Sep 21 12:28:15 2016
@@ -116,6 +116,10 @@
     private IPlotter plotter1;
     private IPlotter plotter2;
     private IHistogram1D chi2Hist;
+
+    private IHistogram1D pedestal_fit;
+    private IHistogram1D pedestal_prefit;
+    
     private IHistogram1D widthOffsetHist;
     private IHistogram1D widthOffsets[];
     private int skipHits = 100000;
@@ -128,14 +132,19 @@
             plotter1.show();
             this.plotter2 = aida.analysisFactory().createPlotterFactory().create();
             plotter2.createRegions(2,2);
-            chi2Hist = aida.histogram1D("chi2", 100, 0, 100);
+            chi2Hist = aida.histogram1D("chi2", 100, 0, 200);
             widthOffsetHist = aida.histogram1D("width offset", 200, -1, 1);
             widthOffsets = new IHistogram1D[442];
             for(int i = 0; i< 442; i++){
                 widthOffsets[i] = aida.histogram1D("width offset " + (i+1), 200, -1, 1);
             }
+            pedestal_prefit = aida.histogram1D("pedestal_prefit", 200, 0, 200);
+            pedestal_fit = aida.histogram1D("pedestal_fit", 200, 0, 200);
+            
             plotter2.region(0).plot(chi2Hist);
             plotter2.region(1).plot(widthOffsetHist);
+            plotter2.region(2).plot(pedestal_prefit);
+            plotter2.region(3).plot(pedestal_fit);
             plotter2.show();
         }
         else{
@@ -319,13 +328,15 @@
                     System.out.println("chi2\t" + chi2);
                     System.out.println("dof\t" + dof);
                     System.out.println("chi2 per dof\t" + chi2/dof);
+                    System.out.println("fit quality: " + fitResult.quality());
                 }
 
                 chi2Hist.fill(chi2);
                 widthOffsetHist.fill(fitResult.fittedParameter("width")-threePoleWidths[cid-1]);
                 widthOffsets[cid-1].fill(fitResult.fittedParameter("width")-threePoleWidths[cid-1]);
-
-
+                pedestal_prefit.fill(ped);
+                pedestal_fit.fill(fitResult.fittedFunction().parameters()[0]);
+                
                 if(hitCount % displayHits == 0){
                     System.out.println("Press enter to continue");
                     try{

Added: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains.java	(added)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains.java	Wed Sep 21 12:28:15 2016
@@ -0,0 +1,28 @@
+package org.hps.recon.ecal;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.base.BaseCluster;
+import org.lcsim.util.Driver;
+
+/**
+ * applies a time-dependent global correction factor to the energies of all ecal clusters.
+ * @author spaul
+ *
+ */
+public abstract class TimeDependentEcalGains extends Driver{
+    String ecalClusterCollectionName = "EcalClustersCorr";
+    public void setEcalClusterCollectionName(String name){
+        this.ecalClusterCollectionName = name;
+    }
+    public void process(EventHeader event){
+        double gain = getGain(event.getTimeStamp());
+        for(BaseCluster c : event.get(BaseCluster.class, ecalClusterCollectionName)){
+            double old_energy = c.getEnergy();
+            double new_energy = old_energy*gain;
+            c.setEnergy(new_energy);
+        }
+    }
+    
+    protected abstract double getGain(long timeStamp);
+    
+}

Added: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains2016.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains2016.java	(added)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/TimeDependentEcalGains2016.java	Wed Sep 21 12:28:15 2016
@@ -0,0 +1,52 @@
+package org.hps.recon.ecal;
+
+public class TimeDependentEcalGains2016 extends TimeDependentEcalGains {
+    private long[] rangeStarts = new long[]{
+            1457140000,
+            1457250000,
+            1460200000,
+            1460750000,
+            1461350000,
+            1461460000
+    };
+    private long[] rangeEnds = new long[]{
+            1457230000,
+            1457350000,
+            1460400000,
+            1461000000,
+            1461450000,
+            1461580000
+    };
+    private double[] A = new double[]{
+            2.2585,
+            2.29357,
+            2.25799,
+            2.26538,
+            2.27994,
+            2.26131
+    };
+    private double[] B = new double[]{
+            .399416,
+            .0388658,
+            .117147,
+            .163699,
+            -.00861751,
+            .0794
+    };
+    private double[] C = new double[]{
+            6.574,
+            30863,
+            39662.9,
+            40000,
+            15844.2,
+            25784.6
+    };
+    protected double getGain(long timeStamp) {
+        for(int i = 0; i<rangeStarts.length; i++){
+            if(timeStamp > rangeStarts[i] && timeStamp<rangeEnds[i]){
+                return A[i]-B[i]*Math.exp(-(timeStamp-rangeStarts[i])/C[i]);
+            }
+        }
+        return 1;
+    }
+}

Added: java/trunk/users/src/main/java/org/hps/users/spaul/EcalPeaks.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/EcalPeaks.java	(added)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/EcalPeaks.java	Wed Sep 21 12:28:15 2016
@@ -0,0 +1,218 @@
+package org.hps.users.spaul;
+
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.hps.conditions.beam.BeamEnergy.BeamEnergyCollection;
+import org.hps.record.triggerbank.AbstractIntData;
+import org.hps.record.triggerbank.TIData;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.Track;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
+import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.geometry.Detector;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+
+import hep.aida.IHistogram1D;
+
+public class EcalPeaks extends Driver {
+	private static final Logger LOGGER = Logger.getLogger(EcalPeaks.class.getPackage().getName());
+
+	private String clusterCollection = "EcalClustersCorr";
+	private String particleCollection = "FinalStateParticles";
+	
+	protected Double beamEnergy;
+    public void setBeamEnergy(double e){
+    this.beamEnergy = e;
+    }
+    public double getBeamEnergy(){
+    return this.beamEnergy;
+    }
+    @Override
+    protected void detectorChanged(Detector detector){
+        BeamEnergyCollection beamEnergyCollection = 
+            this.getConditionsManager().getCachedConditions(BeamEnergyCollection.class, "beam_energies").getCachedData();        
+        if(beamEnergy== null && beamEnergyCollection != null && beamEnergyCollection.size() != 0)
+            beamEnergy = beamEnergyCollection.get(0).getBeamEnergy();
+        else{
+            LOGGER.log(Level.WARNING, "warning:  beam energy not found.  Using a 6.6 GeV as the default energy");
+            beamEnergy = 6.6;
+        }
+        
+        oneClusterPeak = aida.histogram1D("one_cluster_peak", 200, 0, 
+        		1.5*beamEnergy);
+        twoClusterPeak = aida.histogram1D("two_cluster_peak", 200, 0, 
+        		1.5*beamEnergy);
+        oneClusterPeakTrackAssist = aida.histogram1D("one_cluster_peak_ta", 200, 0, 
+        		1.5*beamEnergy);
+        twoClusterPeakTrackAssist = aida.histogram1D("two_cluster_peak_ta", 200, 0, 
+        		1.5*beamEnergy);
+        pairClusterEnergyDiffCut = beamEnergy*.5;
+        pairClusterFeeCut = beamEnergy*.8;
+       
+    }
+    
+    private IHistogram1D oneClusterPeak, twoClusterPeak,
+    oneClusterPeakTrackAssist,
+    twoClusterPeakTrackAssist, timestamps;
+	AIDA aida = AIDA.defaultInstance();
+
+	private double pairClusterTimeCut = 2;
+	private double pairClusterEnergyDiffCut;
+	private double pairClusterFeeCut;
+	private boolean firstEvent = true;
+	
+	//private long firsttimestamp = 0;
+	
+	
+	@Override
+	public void process(EventHeader event){
+		int runNumber = event.getRunNumber();
+		
+		long timestamp = event.getTimeStamp();
+		//System.out.println(timestamp);
+		
+		if(firstEvent){
+			long firsttimestamp = timestamp;
+			double n_minutes = 5; //anything higher than this, and there was probably a beam trip during this file.
+			timestamps = aida.histogram1D("timestamps", 200, firsttimestamp, firsttimestamp+n_minutes*60000);
+		}
+		timestamps.fill(timestamp);
+		firstEvent = false;
+		boolean isSingle1 = false;
+		boolean isPair1 = false;
+		for (GenericObject gob : event.get(GenericObject.class,"TriggerBank"))
+		{
+			if (!(AbstractIntData.getTag(gob) == TIData.BANK_TAG)) continue;
+			TIData tid = new TIData(gob);
+			if (tid.isPair1Trigger()) isPair1 = true;
+			if (tid.isSingle1Trigger()) isSingle1 = true;
+		}
+		
+		if(!isSingle1 && !isPair1)
+			return;
+
+		List<Cluster> clusters = event.get(Cluster.class, clusterCollection);
+		List<ReconstructedParticle> particles = event.get(ReconstructedParticle.class, particleCollection);
+		if(isSingle1)
+			processSingle1(clusters, particles, event);
+		if(isPair1)
+			processPair1(clusters, particles, event);
+	}
+	
+	
+	
+	private void processSingle1(List<Cluster> clusters, List<ReconstructedParticle> particles, EventHeader event) {
+		
+
+		fill1ClusterPeak(clusters, oneClusterPeak);
+		//first filter out any events that don't have a track with at least 90\% of the beam energy.  
+		//This will significantly reduce the size of the left-side tail of the Ecal measured energy distribution.
+		ReconstructedParticle feeFound = null;
+		for(ReconstructedParticle p: particles){
+			if(p.getMomentum().z() > .9*beamEnergy && p.getTracks().size() != 0){
+				feeFound = p;
+				break;
+			}
+		}
+		if(feeFound == null)
+			return;
+		fill1ClusterPeak(clusters, oneClusterPeakTrackAssist);
+		
+		
+	}
+	
+	void fill1ClusterPeak(List<Cluster> clusters, IHistogram1D oneClusterPeak){
+		for(Cluster c1 : clusters){
+			if(c1.getSize() < 3)
+				continue;
+			if(isEdge(c1))
+				continue;
+			oneClusterPeak.fill(c1.getEnergy());
+				
+		}
+	}
+	private boolean isEdge(Cluster c){
+		CalorimeterHit seed = c.getCalorimeterHits().get(0);
+		int ix = seed.getIdentifierFieldValue("ix");
+		int iy = seed.getIdentifierFieldValue("iy");
+		return isEdge(ix, iy);
+			
+	}
+	private boolean isEdge(int ix, int iy){
+        if(iy == 5 || iy == 1 || iy == -1 || iy == -5)
+            return true;
+        if(ix == -23 || ix == 23)
+            return true;
+        if((iy == 2 || iy == -2) && (ix >=-11 && ix <= -1))
+            return true;
+        return false;
+    }
+	private void processPair1(List<Cluster> clusters, List<ReconstructedParticle> particles, EventHeader event) {
+		
+		fill2ClusterPeak(clusters, twoClusterPeak);
+		
+		//first, filter out undeserirable events using the SVT.  
+		boolean found = false;
+		for(ReconstructedParticle top : particles){
+			if(top.getMomentum().y() < 0)
+				continue;
+			if(top.getMomentum().z() > .9*beamEnergy)
+				continue;
+			if(top.getTracks().size() == 0)
+				continue;
+			for(ReconstructedParticle bot : particles){
+				if(bot.getMomentum().y()>0)
+					continue;
+				if(bot.getMomentum().z() > .9*beamEnergy)
+					continue;
+				if(bot.getTracks().size() == 0)
+					continue;
+				double pztot = top.getMomentum().z() + bot.getMomentum().z();
+				if(pztot > .8*beamEnergy && pztot < 1.2*beamEnergy){
+					found = true;
+					break;
+				}
+			}
+		}
+		if(! found)
+			return;
+		
+		fill2ClusterPeak(clusters, twoClusterPeakTrackAssist);
+		
+	}
+	
+	void fill2ClusterPeak(List<Cluster> clusters, IHistogram1D twoClusterPeak){
+		for(Cluster top : clusters){
+			//make sure it is not a FEE.
+			if(top.getEnergy() > pairClusterFeeCut)
+				continue;
+			//nor an edge crystal
+			if(isEdge(top))
+				continue;
+			if(top.getPosition()[1]< 0)
+				continue;
+			for(Cluster bot: clusters){
+				//make sure it's not a FEE
+				if(bot.getEnergy() > pairClusterFeeCut)
+					continue;
+				//nor an edge crystal
+				if(isEdge(bot))
+					continue;
+				//make sure it's on the opposite side of the detector
+				if(bot.getPosition()[1]>0)
+					continue;
+				//cluster time cut
+				if(Math.abs(top.getCalorimeterHits().get(0).getTime()-bot.getCalorimeterHits().get(0).getTime())>pairClusterTimeCut )
+					continue;
+				
+				
+				twoClusterPeak.fill(bot.getEnergy()+ top.getEnergy());
+			}
+		}
+	}
+}

Modified: java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java
 =============================================================================
--- java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java	(original)
+++ java/trunk/users/src/main/java/org/hps/users/spaul/ScalerAttenuationCalculator.java	Wed Sep 21 12:28:15 2016
@@ -41,7 +41,7 @@
 public class ScalerAttenuationCalculator {
 	//after the value of scalerS2b drops below this value, (at a beam trip)
 	// take 10 samples, and use the 10th sample as the zero-point value
-	static double zp_threshold = 500;
+	//static double zp_threshold = 500;
 
 	/**
 	 *