Commit in java/branches/hps_java_trunk_HPSJAVA-251 on MAIN

ecal-recon/src/main/java/org/hps/recon/ecal/
    ClockDriver.java                            +25            added at 1010
    ClockSingleton.java                         +47            added at 1010
    DummyTriggerDriver.java                     +23            added at 1010
    EcalConverterDriver.java                    +1     -12     1009 -> 1010
    EcalEdepToTriggerConverterDriver.java       +8     -79     1009 -> 1010
    EcalRawConverter.java                       +12    -93     1009 -> 1010
    EcalRawConverterDriver.java                 +13    -112    1009 -> 1010
    EcalReadoutDriver.java                      +155           added at 1010
    EcalReadoutToTriggerConverterDriver.java    +6     -93     1009 -> 1010
    EcalTriggerFilterDriver.java                +8     -81     1009 -> 1010
    FADCConverterDriver.java                    +3     -52     1009 -> 1010
    FADCEcalReadoutDriver.java                  +649           added at 1010
    FADCPrimaryTriggerDriver.java               +889           added at 1010
    FADCTriggerDriver.java                      +678           added at 1010
    FADCTriggerVariableDriver.java              +172           added at 1010
    HPSCalorimeterHit.java                      +2     -83     1009 -> 1010
    NeutralPionTriggerDriver.java               +1081          added at 1010
    ReadoutTimestamp.java                       +109           added at 1010
    RingBuffer.java                             +55            added at 1010
    SimpleEcalReadoutDriver.java                +59            added at 1010
    TestRunTriggerDriver.java                   +108           added at 1010
    TimeEvolutionEcalReadoutDriver.java         +92            added at 1010
    TriggerData.java                            +111           added at 1010
    TriggerDriver.java                          +193           added at 1010
    TriggerableDriver.java                      +56            added at 1010
evio/src/main/java/org/hps/evio/
    ECalEvioReader.java                         +12    -74     1009 -> 1010
    ECalHitWriter.java                          +17    -106    1009 -> 1010
    TestRunEvioToLcio.java                      +1     -10     1009 -> 1010
    TestRunReconToEvio.java                            -13     1009 -> 1010
    TestRunTriggeredReconToEvio.java            +1     -17     1009 -> 1010
    TestRunTriggeredReconToLcio.java            +51    -23     1009 -> 1010

Total: +4637 -848
17 added + 14 modified, total 31 files
Abandon botched merges from modules and revert to trunk copies.

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
ClockDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/ClockDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/ClockDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,25 @@
+package org.hps.readout.ecal;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+
+/**
+ * Driver to run the clock in ClockSingleton. Run this driver last.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: ClockDriver.java,v 1.2 2011/10/07 23:14:55 meeg Exp $
+ */
+public class ClockDriver extends Driver {
+    public void setDt(double dt) {
+        ClockSingleton.setDt(dt);
+    }
+
+    public void process(EventHeader event) {
+        ClockSingleton.step();
+        TriggerDriver.resetTrigger();
+    }
+
+    public void startOfData() {
+        ClockSingleton.init();
+    }
+}

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
ClockSingleton.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/ClockSingleton.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/ClockSingleton.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,47 @@
+/*
+ * To change this template, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package org.hps.readout.ecal;
+
+/**
+ * singleton clock class - use ClockDriver to control.
+ * A better solution might be to store absolute time in the event.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: ClockSingleton.java,v 1.6 2012/07/31 00:08:40 meeg Exp $
+ */
+public class ClockSingleton {
+
+	public static final ClockSingleton _instance = new ClockSingleton();
+	private int clock;
+	//time between events (bunch spacing)
+	private double dt = 2.0;
+
+	private ClockSingleton() {
+	}
+
+	public static void init() {
+		_instance.clock = 0;
+	}
+
+	public static int getClock() {
+		return _instance.clock;
+	}
+
+	public static double getTime() {
+		return _instance.dt * _instance.clock;
+	}
+
+	public static double getDt() {
+		return _instance.dt;
+	}
+
+	public static void setDt(double dt) {
+		_instance.dt = dt;
+	}
+
+	public static void step() {
+		_instance.clock++;
+	}
+}

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
DummyTriggerDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/DummyTriggerDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/DummyTriggerDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,23 @@
+package org.hps.readout.ecal;
+
+import org.lcsim.event.EventHeader;
+
+/**
+ * Free-running trigger - triggers on every Nth event
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: DummyTriggerDriver.java,v 1.3 2013/04/02 01:11:11 meeg Exp $
+ */
+public class DummyTriggerDriver extends TriggerDriver {
+
+    int period = 100;
+
+    public void setPeriod(int period) {
+        this.period = period;
+    }
+
+    @Override
+    public boolean triggerDecision(EventHeader event) {
+        return (ClockSingleton.getClock() % period == 0);
+    }
+}
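
For orientation, a minimal sketch of how the three clock/trigger classes added above fit together, using only the API shown in their diffs. ClockDemo and its standalone loop are hypothetical illustrations, not code from this changeset; in a real readout job the steering file registers the readout and trigger drivers first and ClockDriver last, so the clock advances once per simulated bunch after every other driver has processed it.

package org.hps.readout.ecal;

// Hypothetical illustration only: exercises the ClockSingleton API added in this
// commit. ClockDriver.startOfData() calls init(), ClockDriver.process() calls
// step(), and DummyTriggerDriver fires whenever the clock is a multiple of its period.
public class ClockDemo {
    public static void main(String[] args) {
        ClockSingleton.init();      // what ClockDriver.startOfData() does
        ClockSingleton.setDt(2.0);  // bunch spacing in ns

        int period = 100;           // DummyTriggerDriver default: trigger every 100th bunch
        for (int bunch = 0; bunch < 500; bunch++) {
            if (ClockSingleton.getClock() % period == 0) {   // DummyTriggerDriver.triggerDecision()
                System.out.printf("trigger at clock %d, t = %.1f ns%n",
                        ClockSingleton.getClock(), ClockSingleton.getTime());
            }
            ClockSingleton.step();  // what ClockDriver.process() does, last in the chain
        }
    }
}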

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
EcalConverterDriver.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalConverterDriver.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalConverterDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -2,11 +2,9 @@
 
 import java.util.ArrayList;
 import java.util.List;
-
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.EventHeader;
 import org.lcsim.event.RawCalorimeterHit;
-import org.lcsim.geometry.Detector;
 import org.lcsim.util.Driver;
 import org.lcsim.lcio.LCIOConstants;
 
@@ -16,8 +14,6 @@
  * @version $Id: EcalConverterDriver.java,v 1.1 2013/02/25 22:39:24 meeg Exp $
  */
 public class EcalConverterDriver extends Driver {
-	
-	Detector detector = null;
 
     String rawCollectionName;
     String ecalReadoutName = "EcalHits";
@@ -55,11 +51,6 @@
             throw new RuntimeException("The parameter ecalCollectionName was not set!");
         }
     }
-    
-    @Override
-    public void detectorChanged(Detector detector) {
-    	this.detector = detector;
-    }
 
     @Override
     public void process(EventHeader event) {
@@ -86,9 +77,7 @@
     }
 
     private CalorimeterHit HitDtoA(RawCalorimeterHit hit) {
-    	HPSCalorimeterHit h = new HPSCalorimeterHit(DtoA(hit.getAmplitude(), hit.getCellID()), period * hit.getTimeStamp() + dt, hit.getCellID(), 0);
-        h.setDetector(detector);
-    	return h;
+        return new HPSCalorimeterHit(DtoA(hit.getAmplitude(), hit.getCellID()), period * hit.getTimeStamp() + dt, hit.getCellID(), 0);
     }
 
 //    private RawCalorimeterHit HitAtoD(CalorimeterHit hit) {

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
EcalEdepToTriggerConverterDriver.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalEdepToTriggerConverterDriver.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalEdepToTriggerConverterDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -3,40 +3,20 @@
 import java.util.ArrayList;
 import java.util.List;
 
-
-//import org.hps.conditions.deprecated.EcalConditions;
-import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.detector.identifier.Identifier;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.hps.util.RandomGaussian;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.EventHeader;
 import org.lcsim.geometry.Detector;
 import org.lcsim.util.Driver;
 
-
 /**
  *
  * @version $Id: HPSEcalRawConverterDriver.java,v 1.2 2012/05/03 00:17:54
  * phansson Exp $
  */
 public class EcalEdepToTriggerConverterDriver extends Driver {
-	
-	Detector detector = null;
-    static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null; 
-    
-    private static boolean isBadChannelLoaded = true;
-	
+
     private String ecalReadoutName = "EcalHits";
     private String inputCollection = "EcalHits";
     private String readoutCollection = "EcalCalHits";
@@ -95,31 +75,12 @@
         }
     }
 
-
     @Override
     public void detectorChanged(Detector detector) {
-    	
-    	//Must be set to use the database conditions
-        this.detector = detector;
-    	
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        // ID helper.
-        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
-        
-        System.out.println("You are now using the database conditions for EcalEdepToTriggerConverterDriver.");
     }
 
     public boolean isBadCrystal(CalorimeterHit hit) {
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(hit.getCellID());
-    	
-        return isBadChannelLoaded ? channelData.isBadChannel() : false;
+        return EcalConditions.badChannelsLoaded() ? EcalConditions.isBadChannel(hit.getCellID()) : false;
     }
 
     @Override
@@ -180,9 +141,7 @@
 
         int truncatedIntegral = (int) Math.floor(triggerIntegral / truncateScale);
         if (truncatedIntegral > 0) {
-        	HPSCalorimeterHit h = new HPSCalorimeterHit(truncatedIntegral, hit.getTime(), hit.getCellID(), 0);
-        	h.setDetector(detector);
-            return h ;
+            return new HPSCalorimeterHit(truncatedIntegral, hit.getTime(), hit.getCellID(), 0);
         }
         return null;
     }
@@ -192,12 +151,8 @@
             return null;
         }
 
-        
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(hit.getCellID());
-        
 //        double integral = hit.getRawEnergy()/ECalUtils.GeV * gainScale;
-        double gain = _gain > 0 ? _gain : channelData.getGain().getGain();
+        double gain = _gain > 0 ? _gain : EcalConditions.physicalToGain(hit.getCellID());
         double integral = amplitude * gain * pulseIntegral * gainScale * ECalUtils.MeV / ECalUtils.GeV;
 
 //        double thresholdCrossingTime = 0 - hit.getTime();
@@ -221,24 +176,19 @@
 //        System.out.format("dumb: %f, full: %f\n",hit.getRawEnergy() * 1000.0,readoutIntegral * HPSEcalConditions.physicalToGain(id));
 
 //        System.out.format("readout: %f %f\n", amplitude, integral);
-        HPSCalorimeterHit h = new HPSCalorimeterHit(integral, hit.getTime(), hit.getCellID(), 0);
-        h.setDetector(detector);
+        CalorimeterHit h = new HPSCalorimeterHit(integral, hit.getTime(), hit.getCellID(), 0);
         return h;
     }
 
     private double hitAmplitude(CalorimeterHit hit) {
         double energyAmplitude = hit.getRawEnergy();
-        
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(hit.getCellID());
-        
         if (addNoise) {
             //add preamp noise and photoelectron Poisson noise in quadrature
-            double noise = Math.sqrt(Math.pow(channelData.getCalibration().getNoise() * channelData.getGain().getGain() * ECalUtils.MeV, 2) + hit.getRawEnergy() * ECalUtils.MeV / pePerMeV);
+            double noise = Math.sqrt(Math.pow(EcalConditions.physicalToNoise(hit.getCellID()) * EcalConditions.physicalToGain(hit.getCellID()) * ECalUtils.MeV, 2) + hit.getRawEnergy() * ECalUtils.MeV / pePerMeV);
             energyAmplitude += RandomGaussian.getGaussian(0, noise);
         }
 
-        double gain = _gain > 0 ? _gain : channelData.getGain().getGain();
+        double gain = _gain > 0 ? _gain : EcalConditions.physicalToGain(hit.getCellID());
 //        System.out.format("amplitude: %f %f %f %f\n", hit.getRawEnergy(), energyAmplitude, gain, (energyAmplitude / ECalUtils.MeV) / (gain * pulseIntegral));
         return (energyAmplitude / ECalUtils.MeV) / (gain * pulseIntegral);
     }
@@ -257,25 +207,4 @@
             }
         }
     }
-    
-    /** 
-     * Convert physical ID to gain value.
-     * @param cellID (long)
-     * @return channel constants (EcalChannelConstants)
-     */
-    private static EcalChannelConstants findChannel(long cellID) {
-        // Make an ID object from raw hit ID.
-        IIdentifier id = new Identifier(cellID);
-        
-        // Get physical field values.
-        int system = helper.getValue(id, "system");
-        int x = helper.getValue(id, "ix");
-        int y = helper.getValue(id, "iy");
-        
-        // Create an ID to search for in channel collection.
-        GeometryId geometryId = new GeometryId(helper, new int[] { system, x, y });
-                
-        // Get the channel data.
-        return ecalConditions.getChannelConstants(channels.findChannel(geometryId));    
-    }
 }

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
EcalRawConverter.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverter.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverter.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -1,22 +1,10 @@
 package org.hps.recon.ecal;
 
-import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.detector.identifier.Identifier;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.RawCalorimeterHit;
 import org.lcsim.event.RawTrackerHit;
 import org.lcsim.event.base.BaseRawCalorimeterHit;
-import org.lcsim.geometry.Detector;
 
 /**
  *
@@ -29,17 +17,8 @@
     private boolean constantGain = false;
     private double gain;
     private boolean use2014Gain = true;
-    
-    //get the database condition manager
-    
-//    Detector detector = DatabaseConditionsManager.getInstance().getDetectorObject();
-    Detector detector = null;
-    static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null; 
 
-
-    public EcalRawConverter() {	
+    public EcalRawConverter() {
     }
 
     public void setGain(double gain) {
@@ -57,11 +36,7 @@
         if (debug) {
             System.out.println("Summing ADC for hit: " + hit.toString());
         }
-        
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(hit.getCellID());
-        
-        double pedestal = channelData.getCalibration().getPedestal();
+        double pedestal = EcalConditions.physicalToPedestal(hit.getCellID());
         short sum = 0;
         short samples[] = hit.getADCValues();
         for (int isample = 0; isample < samples.length; ++isample) {
@@ -77,12 +52,10 @@
         double time = hit.getTime();
         long id = hit.getCellID();
         double rawEnergy = adcToEnergy(sumADC(hit), id);
-        HPSCalorimeterHit h1 = new HPSCalorimeterHit(rawEnergy + 0.0000001, time, id, 0);
-        h1.setDetector(detector);
-        
 //        double[] pos = hit.getDetectorElement().getGeometry().getPosition().v();
+        CalorimeterHit h = new HPSCalorimeterHit(rawEnergy + 0.0000001, time, id, 0);
         //+0.0000001 is a horrible hack to ensure rawEnergy!=BaseCalorimeterHit.UNSET_CORRECTED_ENERGY
-        return h1;
+        return h;
     }
 
     public CalorimeterHit HitDtoA(RawCalorimeterHit hit, int window, double timeOffset) {
@@ -91,26 +64,21 @@
         }
         double time = hit.getTimeStamp() / 16.0;
         long id = hit.getCellID();
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(id);
-        double adcSum = hit.getAmplitude() - window * channelData.getCalibration().getPedestal();
+        double adcSum = hit.getAmplitude() - window * EcalConditions.physicalToPedestal(id);
         double rawEnergy = adcToEnergy(adcSum, id);
-        HPSCalorimeterHit h2 = new HPSCalorimeterHit(rawEnergy + 0.0000001, time + timeOffset, id, 0);
-        h2.setDetector(detector);
+        CalorimeterHit h = new HPSCalorimeterHit(rawEnergy + 0.0000001, time + timeOffset, id, 0);
         //+0.0000001 is a horrible hack to ensure rawEnergy!=BaseCalorimeterHit.UNSET_CORRECTED_ENERGY
-        return h2;
+        return h;
     }
 
     public RawCalorimeterHit HitAtoD(CalorimeterHit hit, int window) {
         int time = (int) (Math.round(hit.getTime() / 4.0) * 64.0);
         long id = hit.getCellID();
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(id);
         int amplitude;
         if (constantGain) {
-            amplitude = (int) Math.round((hit.getRawEnergy() / ECalUtils.MeV) / gain + window * channelData.getCalibration().getPedestal());
+            amplitude = (int) Math.round((hit.getRawEnergy() / ECalUtils.MeV) / gain + window * EcalConditions.physicalToPedestal(id));
         } else {
-            amplitude = (int) Math.round((hit.getRawEnergy() / ECalUtils.MeV) / channelData.getGain().getGain() + window * channelData.getCalibration().getPedestal());
+            amplitude = (int) Math.round((hit.getRawEnergy() / ECalUtils.MeV) / EcalConditions.physicalToGain(id) + window * EcalConditions.physicalToPedestal(id));
         }
         RawCalorimeterHit h = new BaseRawCalorimeterHit(id, amplitude, time);
         return h;
@@ -120,21 +88,17 @@
      * return energy (units of GeV) corresponding to the ADC sum and crystal ID
      */
     private double adcToEnergy(double adcSum, long cellID) {
-    	
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(cellID);
-    	
         if (use2014Gain) {
             if (constantGain) {
                 return adcSum * ECalUtils.gainFactor * ECalUtils.ecalReadoutPeriod;
             } else {
-                return channelData.getGain().getGain() * adcSum * ECalUtils.gainFactor * ECalUtils.ecalReadoutPeriod; // should not be used for the moment (2014/02)
+                return EcalConditions.physicalToGain(cellID) * adcSum * ECalUtils.gainFactor * ECalUtils.ecalReadoutPeriod; // should not be used for the moment (2014/02)
             }
         } else {
             if (constantGain) {
                 return gain * adcSum * ECalUtils.MeV;
             } else {
-                return channelData.getGain().getGain() * adcSum * ECalUtils.MeV; //gain is defined as MeV/integrated ADC
+                return EcalConditions.physicalToGain(cellID) * adcSum * ECalUtils.MeV; //gain is defined as MeV/integrated ADC
             }
         }
     }
@@ -151,49 +115,4 @@
      return h;
      }
      */
-    /** 
-     * Must be set when an object EcalRawConverter is created.
-     * @param detector (long)
-     */   
-    void setDetector(Detector detector) {
-    	
-//    	h1.setDetector(detector);
-//    	h2.setDetector(detector);
-    	
-        this.detector = detector;
-        
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        // ID helper.
-        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
-        
-        System.out.println("You are now using the database conditions for EcalRawConverter.");
-    }
-    
-    /** 
-     * Convert physical ID to gain value.
-     * @param cellID (long)
-     * @return channel constants (EcalChannelConstants)
-     */
-    private static EcalChannelConstants findChannel(long cellID) {
-        // Make an ID object from raw hit ID.
-        IIdentifier id = new Identifier(cellID);
-        
-        // Get physical field values.
-        int system = helper.getValue(id, "system");
-        int x = helper.getValue(id, "ix");
-        int y = helper.getValue(id, "iy");
-        
-        // Create an ID to search for in channel collection.
-        GeometryId geometryId = new GeometryId(helper, new int[] { system, x, y });
-                
-        // Get the channel data.
-        return ecalConditions.getChannelConstants(channels.findChannel(geometryId));    
-    }   
-    
 }
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
EcalRawConverterDriver.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -3,19 +3,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.detector.identifier.Identifier;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.lcsim.detector.identifier.IIdentifierHelper;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.EventHeader;
 import org.lcsim.event.GenericObject;
@@ -32,13 +20,6 @@
  */
 public class EcalRawConverterDriver extends Driver {
 
-	// To import database conditions
-    static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null; 
-    
-    Detector detector = null;
-    
     EcalRawConverter converter = null;
     String rawCollectionName = "EcalReadoutHits";
     String ecalReadoutName = "EcalHits";
@@ -51,10 +32,9 @@
     private boolean runBackwards = false;
     private boolean useTimestamps = false;
     private boolean useTruthTime = false;
-    private static boolean isBadChannelLoaded = true;
 
     public EcalRawConverterDriver() {
-    	converter = new EcalRawConverter();    	
+        converter = new EcalRawConverter();
     }
 
     public void setUse2014Gain(boolean use2014Gain) {
@@ -114,43 +94,17 @@
 
     @Override
     public void detectorChanged(Detector detector) {
-    	
-    	converter.setDetector(detector);
-    	
-    	// set the detector for the converter
-        this.detector = detector;
-    	
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        // ID helper.
-        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
-        
-        System.out.println("You are now using the database conditions for EcalRawConverterDriver.");
     }
-    /**
-     * @return false if the channel is a good one, true if it is a bad one
-     * @param CalorimeterHit
-     */
-    public static boolean isBadCrystal(CalorimeterHit hit) {   	
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(hit.getCellID());
-    	
-        return isBadChannelLoaded ? channelData.isBadChannel() : false;
+
+    public static boolean isBadCrystal(CalorimeterHit hit) {
+        return EcalConditions.badChannelsLoaded() ? EcalConditions.isBadChannel(hit.getCellID()) : false;
     }
-    
-    /**
-     * @return false if the ADC is a good one, true if it is a bad one
-     * @param CalorimeterHit
-     */
-    public boolean isBadFADC(CalorimeterHit hit) {    	
-        return (getCrate(hit.getCellID()) == 1 && getSlot(hit.getCellID()) == 3);
+
+    public static boolean isBadFADC(CalorimeterHit hit) {
+        long daqID = EcalConditions.physicalToDaqID(hit.getCellID());
+        return (EcalConditions.getCrate(daqID) == 1 && EcalConditions.getSlot(daqID) == 3);
     }
-    
+
     private static double getTimestamp(int system, EventHeader event) { //FIXME: copied from org.hps.readout.ecal.ReadoutTimestamp
         if (event.hasCollection(GenericObject.class, "ReadoutTimestamps")) {
             List<GenericObject> timestamps = event.get(GenericObject.class, "ReadoutTimestamps");
@@ -181,8 +135,8 @@
             double t0ECal = getTimestamp(SYSTEM_ECAL, event);
             timeOffset += ((t0ECal + 250.0) % 500.0) - 250.0;
         }
-    	
-    	
+
+
         int flags = 0;
         flags += 1 << LCIOConstants.RCHBIT_TIME; //store hit time
         flags += 1 << LCIOConstants.RCHBIT_LONG; //store hit position; this flag has no effect for RawCalorimeterHits
@@ -196,11 +150,7 @@
 
                 for (RawTrackerHit hit : hits) {
                     CalorimeterHit newHit = converter.HitDtoA(hit);
-             
-                    // Get the channel data.
-                    EcalChannelConstants channelData = findChannel(newHit.getCellID());
-                    
-                    if (applyBadCrystalMap && channelData.isBadChannel()) {
+                    if (applyBadCrystalMap && isBadCrystal(newHit)) {
                         continue;
                     }
                     if (dropBadFADC && isBadFADC(newHit)) {
@@ -220,7 +170,6 @@
                         System.out.format("old hit energy %d\n", hit.getAmplitude());
                     }
                     CalorimeterHit newHit = converter.HitDtoA(hit, integralWindow, timeOffset);
-                    
                     if (newHit.getRawEnergy() > threshold) {
                         if (applyBadCrystalMap && isBadCrystal(newHit)) {
                             continue;
@@ -257,52 +206,4 @@
             }
         }
     }
-    
-    
-    /** 
-     * Convert physical ID to gain value.
-     * @param cellID (long)
-     * @return channel constants (EcalChannelConstants)
-     */
-    private static EcalChannelConstants findChannel(long cellID) {
-        // Make an ID object from raw hit ID.
-        IIdentifier id = new Identifier(cellID);
-        
-        // Get physical field values.
-        int system = helper.getValue(id, "system");
-        int x = helper.getValue(id, "ix");
-        int y = helper.getValue(id, "iy");
-        
-        // Create an ID to search for in channel collection.
-        GeometryId geometryId = new GeometryId(helper, new int[] { system, x, y });
-                
-        // Get the channel data.
-        return ecalConditions.getChannelConstants(channels.findChannel(geometryId));    
-    }
-    
-    /**
-     * Return crate number from cellID
-     * @param cellID (long)
-     * @return Crate number (int)
-     */
-    private int getCrate(long cellID) {
-        
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getCrate(helper, cellID);
-    }
-    
-    /**
-     * Return slot number from cellID
-     * @param cellID (long)
-     * @return Slot number (int)
-     */
-    private int getSlot(long cellID) {
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getSlot(helper, cellID);         
-    }
-    
 }

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
EcalReadoutDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalReadoutDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalReadoutDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,155 @@
+package org.hps.readout.ecal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.lcio.LCIOConstants;
+
+/**
+ * Performs readout of ECal hits.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: EcalReadoutDriver.java,v 1.4 2013/03/20 01:03:32 meeg Exp $
+ */
+public abstract class EcalReadoutDriver<T> extends TriggerableDriver {
+
+    String ecalCollectionName;
+    String ecalRawCollectionName = "EcalRawHits";
+    String ecalReadoutName = "EcalHits";
+    Class hitClass;
+    //hit type as in org.lcsim.recon.calorimetry.CalorimeterHitType
+    int hitType = 0;
+    //number of bunches in readout cycle
+    int readoutCycle = 1;
+    //minimum readout value to write a hit
+    double threshold = 0.0;
+    //LCIO flags
+    int flags = 0;
+    //readout period in ns
+    double readoutPeriod = 2.0;
+    //readout period time offset in ns
+    double readoutOffset = 0.0;
+    //readout period counter
+    int readoutCounter;
+    public static boolean readoutBit = false;
+    protected boolean debug = false;
+
+    public EcalReadoutDriver() {
+        flags += 1 << LCIOConstants.CHBIT_LONG; //store position
+        flags += 1 << LCIOConstants.RCHBIT_ID1; //store cell ID
+        triggerDelay = 100.0;
+    }
+
+    public void setDebug(boolean debug) {
+        this.debug = debug;
+    }
+
+    public void setEcalReadoutName(String ecalReadoutName) {
+        this.ecalReadoutName = ecalReadoutName;
+    }
+
+    public void setEcalRawCollectionName(String ecalRawCollectionName) {
+        this.ecalRawCollectionName = ecalRawCollectionName;
+    }
+
+    public void setEcalCollectionName(String ecalCollectionName) {
+        this.ecalCollectionName = ecalCollectionName;
+    }
+
+    public void setReadoutCycle(int readoutCycle) {
+        this.readoutCycle = readoutCycle;
+        if (readoutCycle > 0) {
+            this.readoutPeriod = readoutCycle * ClockSingleton.getDt();
+        }
+    }
+
+    public void setReadoutOffset(double readoutOffset) {
+        this.readoutOffset = readoutOffset;
+    }
+
+    public void setReadoutPeriod(double readoutPeriod) {
+        this.readoutPeriod = readoutPeriod;
+        this.readoutCycle = -1;
+    }
+
+    public void setThreshold(double threshold) {
+        this.threshold = threshold;
+    }
+
+    @Override
+    public void startOfData() {
+        super.startOfData();
+        if (ecalCollectionName == null) {
+            throw new RuntimeException("The parameter ecalCollectionName was not set!");
+        }
+
+        readoutCounter = 0;
+
+        initReadout();
+    }
+
+    @Override
+    public void process(EventHeader event) {
+        //System.out.println(this.getClass().getCanonicalName() + " - process");
+        // Get the list of ECal hits.        
+        List<CalorimeterHit> hits;
+        if (event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
+            hits = event.get(CalorimeterHit.class, ecalCollectionName);
+        } else {
+            hits = new ArrayList<CalorimeterHit>();
+        }
+        //write hits into buffers
+        putHits(hits);
+
+        ArrayList<T> newHits = null;
+
+        //if at the end of a readout cycle, write buffers to hits
+        if (readoutCycle > 0) {
+            if ((ClockSingleton.getClock() + 1) % readoutCycle == 0) {
+                if (newHits == null) {
+                    newHits = new ArrayList<T>();
+                }
+                readHits(newHits);
+                readoutCounter++;
+            }
+        } else {
+            while (ClockSingleton.getTime() - readoutTime() + ClockSingleton.getDt() >= readoutPeriod) {
+                if (newHits == null) {
+                    newHits = new ArrayList<T>();
+                }
+                readHits(newHits);
+                readoutCounter++;
+            }
+        }
+
+        if (newHits != null) {
+            event.put(ecalRawCollectionName, newHits, hitClass, flags, ecalReadoutName);
+        }
+
+        checkTrigger(event);
+    }
+
+    protected double readoutTime() {
+        return readoutCounter * readoutPeriod + readoutOffset;
+    }
+
+    //read analog signal out of buffers and make hits; reset buffers
+    protected abstract void readHits(List<T> hits);
+
+    //add deposited energy to buffers
+    //must be run every event, even if the list is empty
+    protected abstract void putHits(List<CalorimeterHit> hits);
+
+    @Override
+    protected void processTrigger(EventHeader event) {
+    }
+
+    //initialize buffers
+    protected abstract void initReadout();
+
+    public int getTimestampType() {
+        return ReadoutTimestamp.SYSTEM_ECAL;
+    }
+}
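
As a reading aid, a minimal sketch of what a concrete subclass of the abstract EcalReadoutDriver above has to supply: initReadout() to set up buffers, putHits() to fill them every event, and readHits() to empty them into output hits at the end of a readout cycle. The class name, the summing scheme, and the import location of HPSCalorimeterHit are assumptions for illustration; this is not the SimpleEcalReadoutDriver added elsewhere in this revision.

package org.hps.readout.ecal;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.hps.recon.ecal.HPSCalorimeterHit; // assumed package for this class
import org.lcsim.event.CalorimeterHit;

// Hypothetical example of the EcalReadoutDriver contract: accumulate deposited
// energy per crystal during a readout cycle, then emit one hit per crystal above
// threshold when the superclass triggers a readout.
public class SummingEcalReadoutDriver extends EcalReadoutDriver<CalorimeterHit> {

    private Map<Long, Double> energySums;

    public SummingEcalReadoutDriver() {
        hitClass = HPSCalorimeterHit.class;
    }

    // initialize buffers (called via startOfData() in the superclass)
    @Override
    protected void initReadout() {
        energySums = new HashMap<Long, Double>();
    }

    // add deposited energy to buffers; runs every event, even with an empty list
    @Override
    protected void putHits(List<CalorimeterHit> hits) {
        for (CalorimeterHit hit : hits) {
            Double sum = energySums.get(hit.getCellID());
            energySums.put(hit.getCellID(), (sum == null ? 0.0 : sum) + hit.getRawEnergy());
        }
    }

    // read the buffers out into hits at the end of a readout cycle, then reset them
    @Override
    protected void readHits(List<CalorimeterHit> hits) {
        for (Map.Entry<Long, Double> entry : energySums.entrySet()) {
            if (entry.getValue() > threshold) {
                hits.add(new HPSCalorimeterHit(entry.getValue(), readoutTime(), entry.getKey(), hitType));
            }
        }
        energySums.clear();
    }
}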

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
EcalReadoutToTriggerConverterDriver.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalReadoutToTriggerConverterDriver.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalReadoutToTriggerConverterDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -3,19 +3,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.detector.identifier.Identifier;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.lcsim.detector.identifier.IIdentifierHelper;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.EventHeader;
 import org.lcsim.event.base.BaseRawCalorimeterHit;
@@ -29,12 +17,6 @@
  */
 public class EcalReadoutToTriggerConverterDriver extends Driver {
 
-	// To import database conditions
-    static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null;
-	Detector detector = null;
-	
     String rawCollectionName = "EcalReadoutHits";
     String ecalReadoutName = "EcalHits";
     String ecalCollectionName = "EcalCalHits";
@@ -49,7 +31,6 @@
     private int triggerThreshold = 80;
     private double timeShift = 0;
     private int truncateScale = 128;
-    private static boolean isBadChannelLoaded = true;
 
     public EcalReadoutToTriggerConverterDriver() {
     }
@@ -95,30 +76,15 @@
 
     @Override
     public void detectorChanged(Detector detector) {
-    	this.detector = detector;
-    	
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        // ID helper.
-        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
-        
-        System.out.println("You are now using the database conditions for EcalReadoutToTriggerConverterDriver.");
     }
 
     public boolean isBadCrystal(CalorimeterHit hit) {
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(hit.getCellID());
-    	
-        return isBadChannelLoaded ? channelData.isBadChannel() : false;
+        return EcalConditions.badChannelsLoaded() ? EcalConditions.isBadChannel(hit.getCellID()) : false;
     }
 
     public boolean isBadFADC(CalorimeterHit hit) {
-        return (getCrate(hit.getCellID()) == 1 && getSlot(hit.getCellID()) == 3);
+        long daqID = EcalConditions.physicalToDaqID(hit.getCellID());
+        return (EcalConditions.getCrate(daqID) == 1 && EcalConditions.getSlot(daqID) == 3);
     }
 
     @Override
@@ -146,12 +112,8 @@
     }
 
     public CalorimeterHit HitDtoA(BaseRawCalorimeterHit hit, int window) {
-    	
-        // Get the channel data.
-        EcalChannelConstants channelData = findChannel(hit.getCellID());
-    	
         double integral = tp * Math.E / readoutPeriod;
-        double readoutIntegral = (hit.getAmplitude() - window * channelData.getCalibration().getPedestal());
+        double readoutIntegral = (hit.getAmplitude() - window * EcalConditions.physicalToPedestal(hit.getCellID()));
         double amplitude = readoutIntegral / integral;
 
 //        double time = readoutPeriod * (Math.random() - 1);
@@ -206,8 +168,7 @@
         if (truncatedIntegral <= 0) {
             truncatedIntegral = 0;
         }
-        HPSCalorimeterHit h = new HPSCalorimeterHit(truncatedIntegral, hitTime, id, 0);
-        h.setDetector(detector);
+        CalorimeterHit h = new HPSCalorimeterHit(truncatedIntegral, hitTime, id, 0);
 //        CalorimeterHit h = new HPSRawCalorimeterHit(triggerIntegral + 0.0000001, hit.getPosition(), hitTime, id, 0);
         //+0.0000001 is a horrible hack to ensure rawEnergy!=BaseCalorimeterHit.UNSET_CORRECTED_ENERGY
         return h;
@@ -227,52 +188,4 @@
             }
         }
     }
-
-    /** 
-     * Convert physical ID to gain value.
-     * @param cellID (long)
-     * @return channel constants (EcalChannelConstants)
-     */
-    private static EcalChannelConstants findChannel(long cellID) {
-        // Make an ID object from raw hit ID.
-        IIdentifier id = new Identifier(cellID);
-        
-        // Get physical field values.
-        int system = helper.getValue(id, "system");
-        int x = helper.getValue(id, "ix");
-        int y = helper.getValue(id, "iy");
-        
-        // Create an ID to search for in channel collection.
-        GeometryId geometryId = new GeometryId(helper, new int[] { system, x, y });
-                
-        // Get the channel data.
-        return ecalConditions.getChannelConstants(channels.findChannel(geometryId));    
-    }  
-    
-    /**
-     * Return crate number from cellID
-     * @param cellID (long)
-     * @return Crate number (int)
-     */
-    private int getCrate(long cellID) {
-        
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getCrate(helper, cellID);
-    }
-    
-    /**
-     * Return slot number from cellID
-     * @param cellID (long)
-     * @return Slot number (int)
-     */
-    private int getSlot(long cellID) {
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getSlot(helper, cellID);         
-    }
-    
-    
 }

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
EcalTriggerFilterDriver.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalTriggerFilterDriver.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/EcalTriggerFilterDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -5,18 +5,7 @@
 import java.util.Queue;
 import java.util.concurrent.ArrayBlockingQueue;
 
-import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel;
-import org.hps.conditions.ecal.EcalChannel.DaqId;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.EventHeader;
 import org.lcsim.geometry.Detector;
@@ -29,13 +18,6 @@
  */
 public class EcalTriggerFilterDriver extends Driver {
 
-	// To import database conditions
-    static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null;
-    int systemId;
-    Detector detector = null;
-    
     private String ecalReadoutName = "EcalHits";
     private String inputCollection = "EcalReadoutHits";
     private String outputCollection = "EcalCalHits";
@@ -73,22 +55,6 @@
 
     @Override
     public void detectorChanged(Detector detector) {
-    	
-    	this.detector = detector;
-    	
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        // ID helper.
-        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
-
-        systemId = detector.getSubdetector("Ecal").getSystemID();
-        
-        System.out.println("You are now using the database conditions for EcalTriggerFilterDriver.");
     }
 
     @Override
@@ -119,19 +85,15 @@
         }
     }
 
-    /**
-     * This method takes input hits and makes new hits with different ix
-     * @param CalorimeterHit hit
-     * @return new HPSCalorimeterHit
-     */
     private CalorimeterHit filterHit(CalorimeterHit hit) {
         int ix = hit.getIdentifierFieldValue("ix");
         int iy = hit.getIdentifierFieldValue("iy");
-        int crate = getCrate(hit.getCellID());
-        int slot = getSlot(hit.getCellID());
+        long daqID = EcalConditions.physicalToDaqID(hit.getCellID());
+        int crate = EcalConditions.getCrate(daqID);
+        short slot = EcalConditions.getSlot(daqID);
+        short channel = EcalConditions.getChannel(daqID);
 
-        int delay = iy>0?topDelay:bottomDelay;  
-        
+        int delay = iy>0?topDelay:bottomDelay;
         // no triggers from crate 1, slot 3 
         if (crate == 1 && slot == 3) {
             return null;
@@ -141,43 +103,8 @@
         if (ix > 0 && iy > 0) {
             ix = 24 - ix;
         }
-     
-        int values[] = {systemId, ix, iy};
-        GeometryId geomId = new GeometryId(helper, values);       
-        // Creating the new channel from cell id, ix and iy, then reading its ID       
-        long newID = geomId.encode();      
-        
+        long newID = EcalConditions.makePhysicalID(ix, iy);
         //make new hit; set position to null so it gets recalculated
-        HPSCalorimeterHit h = new HPSCalorimeterHit(hit.getRawEnergy(), hit.getTime()+delay*4, newID, hit.getType());
-        h.setDetector(detector);
-        return h;
+        return new HPSCalorimeterHit(hit.getRawEnergy(), hit.getTime()+delay*4, newID, hit.getType());
     }
-    
-    /**
-     * Return crate number from cellID
-     * @param cellID (long)
-     * @return Crate number (int)
-     */
-    private int getCrate(long cellID) {
-        
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getCrate(helper, cellID);
-    }
-    
-    /**
-     * Return slot number from cellID
-     * @param cellID (long)
-     * @return Slot number (int)
-     */
-    private int getSlot(long cellID) {
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getSlot(helper, cellID);         
-    } 
- 
-    
-    
 }

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
FADCConverterDriver.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCConverterDriver.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCConverterDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -3,15 +3,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.detector.identifier.Identifier;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.lcsim.event.EventHeader;
 import org.lcsim.event.RawTrackerHit;
 import org.lcsim.event.base.BaseRawCalorimeterHit;
@@ -24,10 +16,6 @@
  */
 public class FADCConverterDriver extends Driver {
 
-    EcalConditions ecalConditions = null;
-    IIdentifierHelper helper = null;
-    EcalChannelCollection channels = null; 
-    EcalRawConverter converter = null;
     String rawCollectionName = "EcalReadoutHits";
     String ecalReadoutName = "EcalHits";
     String ecalCollectionName = "EcalIntegralHits";
@@ -71,18 +59,7 @@
     }
 
     @Override
-    public void detectorChanged(Detector detector) {        
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        // ID helper.
-        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
-        
-        System.out.println("You are now using the database conditions for FADCConverterDriver");
+    public void detectorChanged(Detector detector) {
     }
 
     @Override
@@ -97,12 +74,8 @@
             for (RawTrackerHit hit : hits) {
                 short[] window = hit.getADCValues();
                 long id = hit.getCellID();
-                
-                // Get the channel data.
-                EcalChannelConstants channelData = findChannel(id);
-                
                 //do DAQ readout
-                double crystalThreshold = channelData.getCalibration().getPedestal() + threshold;
+                double crystalThreshold = EcalConditions.physicalToPedestal(id) + threshold;
                 int adcSum = 0;
                 int pointerOffset = 0;
                 int numSamplesToRead = 0;
@@ -127,26 +100,4 @@
         int flags = 0;
         event.put(ecalCollectionName, readoutHits, BaseRawCalorimeterHit.class, flags, ecalReadoutName);
     }
-    
-    /** 
-     * Convert physical ID to gain value.
-     * @param cellID (long)
-     * @return channel constants (EcalChannelConstants)
-     */
-    private EcalChannelConstants findChannel(long cellID) {
-        // Make an ID object from raw hit ID.
-        IIdentifier id = new Identifier(cellID);
-        
-        // Get physical field values.
-        int system = helper.getValue(id, "system");
-        int x = helper.getValue(id, "ix");
-        int y = helper.getValue(id, "iy");
-        
-        // Create an ID to search for in channel collection.
-        GeometryId geometryId = new GeometryId(helper, new int[] { system, x, y });
-                
-        // Get the channel data.
-        return ecalConditions.getChannelConstants(channels.findChannel(geometryId));    
-    }
-    
 }

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
FADCEcalReadoutDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCEcalReadoutDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCEcalReadoutDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,649 @@
+package org.hps.readout.ecal;
+
+import static org.hps.recon.ecal.ECalUtils.ecalReadoutPeriod;
+import static org.hps.recon.ecal.ECalUtils.fallTime;
+import static org.hps.recon.ecal.ECalUtils.maxVolt;
+import static org.hps.recon.ecal.ECalUtils.nBit;
+import static org.hps.recon.ecal.ECalUtils.readoutGain;
+import static org.hps.recon.ecal.ECalUtils.riseTime;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Set;
+
+import org.hps.conditions.ConditionsDriver;
+import org.hps.conditions.TableConstants;
+import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
+import org.hps.conditions.ecal.EcalChannel.GeometryId;
+import org.hps.conditions.ecal.EcalChannelConstants;
+import org.hps.conditions.ecal.EcalConditions;
+import org.lcsim.conditions.ConditionsManager;
+import org.lcsim.detector.identifier.IIdentifier;
+import org.lcsim.detector.identifier.IIdentifierHelper;
+import org.lcsim.detector.identifier.Identifier;
+import org.hps.recon.ecal.ECalUtils;
+import org.hps.recon.ecal.HPSRawCalorimeterHit;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.RawCalorimeterHit;
+import org.lcsim.event.RawTrackerHit;
+import org.lcsim.event.base.BaseRawCalorimeterHit;
+import org.lcsim.event.base.BaseRawTrackerHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.geometry.Subdetector;
+import org.lcsim.geometry.subdetector.HPSEcal3;
+import org.hps.util.RandomGaussian;
+import org.lcsim.lcio.LCIOConstants;
+
+/**
+ * Performs readout of ECal hits. Simulates time evolution of preamp output
+ * pulse.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: FADCEcalReadoutDriver.java,v 1.4 2013/10/31 00:11:02 meeg Exp $
+ */
+public class FADCEcalReadoutDriver extends EcalReadoutDriver<RawCalorimeterHit> {
+
+    // Repeated here from EventConstants in evio module to avoid depending on it.
+    private static final int ECAL_WINDOW_MODE = 1;
+    private static final int ECAL_PULSE_MODE = 2;
+    private static final int ECAL_PULSE_INTEGRAL_MODE = 3;
+    String ecalName = "Ecal";
+    Subdetector ecal;
+    EcalConditions ecalConditions = null;
+    IIdentifierHelper helper = null;
+    EcalChannelCollection channels = null; 
+    //buffer for preamp signals (units of volts, no pedestal)
+    private Map<Long, RingBuffer> signalMap = null;
+    //ADC pipeline for readout (units of ADC counts)
+    private Map<Long, FADCPipeline> pipelineMap = null;
+    //buffer for window sums
+    private Map<Long, Integer> sumMap = null;
+    //buffer for timestamps
+    private Map<Long, Integer> timeMap = null;
+    //queue for hits to be output to clusterer
+    private PriorityQueue<HPSRawCalorimeterHit> outputQueue = null;
+    //length of ring buffer (in readout cycles)
+    private int bufferLength = 100;
+    //length of readout pipeline (in readout cycles)
+    private int pipelineLength = 2000;
+    //shaper time constant in ns
+    private double tp = 6.95;
+    //delay (number of readout periods) between start of summing window and output of hit to clusterer
+    private int delay0 = 32;
+    //start of readout window relative to trigger time (in readout cycles)
+    //in FADC documentation, "Programmable Latency" or PL
+    private int readoutLatency = 100;
+    //number of ADC samples to read out
+    //in FADC documentation, "Programmable Trigger Window" or PTW
+    private int readoutWindow = 100;
+    //number of ADC samples to read out before each rising threshold crossing
+    //in FADC documentation, "number of samples before" or NSB
+    private int numSamplesBefore = 5;
+    //number of ADC samples to read out after each rising threshold crossing
+    //in FADC documentation, "number of samples before" or NSA
+    private int numSamplesAfter = 30;
+//    private HPSEcalConverter converter = null;
+    //output buffer for hits
+    private LinkedList<HPSRawCalorimeterHit> buffer = new LinkedList<HPSRawCalorimeterHit>();
+    //number of readout periods for which a given hit stays in the buffer
+    private int coincidenceWindow = 2;
+    //output collection name for hits read out from trigger
+    private String ecalReadoutCollectionName = "EcalReadoutHits";
+    private int mode = ECAL_PULSE_INTEGRAL_MODE;
+    private int readoutThreshold = 10;
+    private int triggerThreshold = 10;
+    private double scaleFactor = 1;
+    private double fixedGain = -1;
+    private boolean constantTriggerWindow = true;
+    private boolean addNoise = false;
+    private double pePerMeV = 2.0; //photoelectrons per MeV, used to calculate noise
+    //switch between test run and 2014 definitions of gain constants
+    private boolean use2014Gain = true;
+    //switch between three pulse shape functions
+    private PulseShape pulseShape = PulseShape.ThreePole;
+
+    public enum PulseShape {
+
+        CRRC, DoubleGaussian, ThreePole
+    }
+
+    public FADCEcalReadoutDriver() {
+        flags = 0;
+        flags += 1 << LCIOConstants.RCHBIT_TIME; //store timestamp
+        hitClass = HPSRawCalorimeterHit.class;
+        setReadoutPeriod(ecalReadoutPeriod);
+//        converter = new HPSEcalConverter(null);
+    }
+
+    public void setAddNoise(boolean addNoise) {
+        this.addNoise = addNoise;
+    }
+
+    public void setConstantTriggerWindow(boolean constantTriggerWindow) {
+        this.constantTriggerWindow = constantTriggerWindow;
+    }
+
+    public void setFixedGain(double fixedGain) {
+        this.fixedGain = fixedGain;
+    }
+
+    public void setEcalName(String ecalName) {
+        this.ecalName = ecalName;
+    }
+
+    public void setReadoutThreshold(int readoutThreshold) {
+        this.readoutThreshold = readoutThreshold;
+    }
+
+    public void setScaleFactor(double scaleFactor) {
+        this.scaleFactor = scaleFactor;
+    }
+
+    public void setTriggerThreshold(int triggerThreshold) {
+        this.triggerThreshold = triggerThreshold;
+    }
+
+    public void setEcalReadoutCollectionName(String ecalReadoutCollectionName) {
+        this.ecalReadoutCollectionName = ecalReadoutCollectionName;
+    }
+
+    public void setNumSamplesAfter(int numSamplesAfter) {
+        this.numSamplesAfter = numSamplesAfter;
+    }
+
+    public void setNumSamplesBefore(int numSamplesBefore) {
+        this.numSamplesBefore = numSamplesBefore;
+    }
+
+    public void setReadoutLatency(int readoutLatency) {
+        this.readoutLatency = readoutLatency;
+    }
+
+    public void setReadoutWindow(int readoutWindow) {
+        this.readoutWindow = readoutWindow;
+    }
+
+    public void setCoincidenceWindow(int coincidenceWindow) {
+        this.coincidenceWindow = coincidenceWindow;
+    }
+
+    public void setUse2014Gain(boolean use2014Gain) {
+        this.use2014Gain = use2014Gain;
+    }
+
+    public void setPulseShape(String pulseShape) {
+        this.pulseShape = PulseShape.valueOf(pulseShape);
+    }
+
+    public void setTp(double tp) {
+        this.tp = tp;
+    }
+
+//    public void setFallTime(double fallTime) {
+//        this.fallTime = fallTime;
+//    }
+    public void setPePerMeV(double pePerMeV) {
+        this.pePerMeV = pePerMeV;
+    }
+
+//    public void setRiseTime(double riseTime) {
+//        this.riseTime = riseTime;
+//    }
+    public void setDelay0(int delay0) {
+        this.delay0 = delay0;
+    }
+
+    public void setBufferLength(int bufferLength) {
+        this.bufferLength = bufferLength;
+        resetFADCBuffers();
+    }
+
+    public void setPipelineLength(int pipelineLength) {
+        this.pipelineLength = pipelineLength;
+        resetFADCBuffers();
+    }
+
+    public void setMode(int mode) {
+        this.mode = mode;
+        if (mode != ECAL_WINDOW_MODE && mode != ECAL_PULSE_MODE && mode != ECAL_PULSE_INTEGRAL_MODE) {
+            throw new IllegalArgumentException("invalid mode " + mode);
+        }
+    }
+
+    /**
+     * Return the map of preamp signal buffers. For debug only.
+     *
+     * @return the map from crystal cell ID to its preamp signal ring buffer
+     */
+    public Map<Long, RingBuffer> getSignalMap() {
+        return signalMap;
+    }
+
+    /**
+     * Return the map of FADC pipelines. For debug only.
+     *
+     * @return the map from crystal cell ID to its FADC pipeline
+     */
+    public Map<Long, FADCPipeline> getPipelineMap() {
+        return pipelineMap;
+    }
+
+    @Override
+    protected void readHits(List<RawCalorimeterHit> hits) {
+
+        for (Long cellID : signalMap.keySet()) {
+            RingBuffer signalBuffer = signalMap.get(cellID);
+
+            FADCPipeline pipeline = pipelineMap.get(cellID);
+            pipeline.step();
+           
+            // Get the channel data.
+            EcalChannelConstants channelData = findChannel(cellID);
+
+            double currentValue = signalBuffer.currentValue() * ((Math.pow(2, nBit) - 1) / maxVolt); //nBit-bit ADC with a maxVolt input range
+            int pedestal = (int) Math.round(channelData.getCalibration().getPedestal());
+            int digitizedValue = Math.min((int) Math.round(pedestal + currentValue), (int) Math.pow(2, nBit)); //output is capped at 2^nBit, the overflow code, for any input above the maxVolt range
+            pipeline.writeValue(digitizedValue);
+            int pedestalSubtractedValue = digitizedValue - pedestal;
+            //System.out.println(signalBuffer.currentValue() + "   " + currentValue + "   " + pipeline.currentValue());
+
+            Integer sum = sumMap.get(cellID);
+            if (sum == null && pedestalSubtractedValue > triggerThreshold) {
+                timeMap.put(cellID, readoutCounter);
+                if (constantTriggerWindow) {
+                    int sumBefore = 0;
+                    for (int i = 0; i < numSamplesBefore; i++) {
+                        if (debug) {
+                            System.out.format("trigger %d, %d: %d\n", cellID, i, pipeline.getValue(numSamplesBefore - i - 1));
+                        }
+                        sumBefore += pipeline.getValue(numSamplesBefore - i - 1);
+                    }
+                    sumMap.put(cellID, sumBefore);
+                } else {
+                    sumMap.put(cellID, pedestalSubtractedValue);
+                }
+            }
+            if (sum != null) {
+                if (constantTriggerWindow) {
+                    if (timeMap.get(cellID) + numSamplesAfter >= readoutCounter) {
+                        if (debug) {
+                            System.out.format("trigger %d, %d: %d\n", cellID, readoutCounter - timeMap.get(cellID) + numSamplesBefore - 1, pipeline.getValue(0));
+                        }
+                        sumMap.put(cellID, sum + pipeline.getValue(0));
+                    } else if (timeMap.get(cellID) + delay0 <= readoutCounter) {
+//                        System.out.printf("sum = %f\n", sum);
+                        outputQueue.add(new HPSRawCalorimeterHit(cellID,
+                                (int) Math.round(sum / scaleFactor),
+                                64 * timeMap.get(cellID),
+                                readoutCounter - timeMap.get(cellID) + 1));
+                        sumMap.remove(cellID);
+                    }
+                } else {
+                    if (pedestalSubtractedValue < triggerThreshold || timeMap.get(cellID) + delay0 == readoutCounter) {
+//					System.out.printf("sum = %f\n",sum);
+                        outputQueue.add(new HPSRawCalorimeterHit(cellID,
+                                (int) Math.round((sum + pedestalSubtractedValue) / scaleFactor),
+                                64 * timeMap.get(cellID),
+                                readoutCounter - timeMap.get(cellID) + 1));
+                        sumMap.remove(cellID);
+                    } else {
+                        sumMap.put(cellID, sum + pedestalSubtractedValue);
+                    }
+                }
+            }
+            signalBuffer.step();
+        }
+        while (outputQueue.peek() != null && outputQueue.peek().getTimeStamp() / 64 <= readoutCounter - delay0) {
+            if (outputQueue.peek().getTimeStamp() / 64 < readoutCounter - delay0) {
+                System.out.println("Stale hit in output queue");
+                outputQueue.poll();
+            } else {
+                buffer.add(outputQueue.poll());
+            }
+        }
+        while (!buffer.isEmpty() && buffer.peek().getTimeStamp() / 64 <= readoutCounter - delay0 - coincidenceWindow) {
+            buffer.remove();
+        }
+        if (debug) {
+            for (RawCalorimeterHit hit : buffer) {
+                System.out.format("new hit: energy %d\n", hit.getAmplitude());
+            }
+        }
+
+        hits.addAll(buffer);
+    }
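
Editor's note: a sketch of what the constant-trigger-window branch above computes, derived from the code itself rather than from the FADC firmware documentation. With NSB = numSamplesBefore, NSA = numSamplesAfter, and a rising threshold crossing at readout cycle t_c, the queued hit carries

    sum = \sum_{i = t_c - (NSB - 1)}^{t_c + NSA} ADC_i        (raw pipeline samples, pedestal included)
    amplitude = round(sum / scaleFactor)
    timestamp = 64 * t_c

and the hit is pushed onto the output queue delay0 readout cycles after the crossing.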
+
+    @Override
+    public void startOfData() {
+        super.startOfData();
+        if (ecalReadoutCollectionName == null) {
+            throw new RuntimeException("The parameter ecalReadoutCollectionName was not set!");
+        }
+    }
+
+    @Override
+    protected void processTrigger(EventHeader event) {
+        switch (mode) {
+            case ECAL_WINDOW_MODE:
+                if (debug) {
+                    System.out.println("Reading out ECal in window mode");
+                }
+                event.put(ecalReadoutCollectionName, readWindow(), RawTrackerHit.class, 0, ecalReadoutName);
+                break;
+            case ECAL_PULSE_MODE:
+                if (debug) {
+                    System.out.println("Reading out ECal in pulse mode");
+                }
+                event.put(ecalReadoutCollectionName, readPulses(), RawTrackerHit.class, 0, ecalReadoutName);
+                break;
+            case ECAL_PULSE_INTEGRAL_MODE:
+                if (debug) {
+                    System.out.println("Reading out ECal in integral mode");
+                }
+                event.put(ecalReadoutCollectionName, readIntegrals(), RawCalorimeterHit.class, flags, ecalReadoutName);
+                break;
+        }
+    }
+
+    @Override
+    public double readoutDeltaT() {
+        double triggerTime = ClockSingleton.getTime() + triggerDelay;
+        int cycle = (int) Math.floor((triggerTime - readoutOffset + ClockSingleton.getDt()) / readoutPeriod);
+        double readoutTime = (cycle - readoutLatency) * readoutPeriod + readoutOffset - ClockSingleton.getDt();
+        return readoutTime;
+    }
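
Editor's note: readoutDeltaT() pins the delayed trigger time onto the FADC clock and backs it up by the programmable latency,

    t_readout = ( floor( (t_trig - readoutOffset + dt) / readoutPeriod ) - readoutLatency ) * readoutPeriod + readoutOffset - dt .

As a rough worked example (assuming a 4 ns readout period and ignoring the offset and dt terms, neither of which is fixed in this file): a trigger at t_trig = 1000 ns with readoutLatency = 100 gives cycle 250, so the readout window starts at (250 - 100) * 4 ns = 600 ns, i.e. 400 ns before the trigger.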
+
+    protected short[] getWindow(long cellID) {
+        FADCPipeline pipeline = pipelineMap.get(cellID);
+        short[] adcValues = new short[readoutWindow];
+        for (int i = 0; i < readoutWindow; i++) {
+            adcValues[i] = (short) pipeline.getValue(readoutLatency - i - 1);
+//			if (adcValues[i] != 0) {
+//				System.out.println("getWindow: " + adcValues[i] + " at i = " + i);
+//			}
+        }
+        return adcValues;
+    }
+
+    protected List<RawTrackerHit> readWindow() {
+//		System.out.println("Reading FADC data");
+        List<RawTrackerHit> hits = new ArrayList<RawTrackerHit>();
+        for (Long cellID : pipelineMap.keySet()) {
+            short[] adcValues = getWindow(cellID);
+            hits.add(new BaseRawTrackerHit(cellID, 0, adcValues));
+        }
+        return hits;
+    }
+
+    protected List<RawTrackerHit> readPulses() {
+//		System.out.println("Reading FADC data");
+        List<RawTrackerHit> hits = new ArrayList<RawTrackerHit>();
+        for (Long cellID : pipelineMap.keySet()) {
+            short[] window = getWindow(cellID);
+            short[] adcValues = null;
+            int pointerOffset = 0;
+            int numSamplesToRead = 0;
+            int thresholdCrossing = 0;
+            
+            // Get the channel data.
+            EcalChannelConstants channelData = findChannel(cellID);
+            
+            for (int i = 0; i < readoutWindow; i++) {
+                if (numSamplesToRead != 0) {
+                    adcValues[adcValues.length - numSamplesToRead] = window[i - pointerOffset];
+                    numSamplesToRead--;
+                    if (numSamplesToRead == 0) {
+                        hits.add(new BaseRawTrackerHit(cellID, thresholdCrossing, adcValues));
+                    }
+                } else if ((i == 0 || window[i - 1] <= channelData.getCalibration().getPedestal() + readoutThreshold) && window[i] > channelData.getCalibration().getPedestal() + readoutThreshold) {
+                    thresholdCrossing = i;
+                    pointerOffset = Math.min(numSamplesBefore, i);
+                    numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, readoutWindow - i - pointerOffset - 1);
+                    adcValues = new short[numSamplesToRead];
+                }
+            }
+        }
+        return hits;
+    }
+
+    protected List<RawCalorimeterHit> readIntegrals() {
+//		System.out.println("Reading FADC data");
+        List<RawCalorimeterHit> hits = new ArrayList<RawCalorimeterHit>();
+        for (Long cellID : pipelineMap.keySet()) {
+            short[] window = getWindow(cellID);
+            int adcSum = 0;
+            int pointerOffset = 0;
+            int numSamplesToRead = 0;
+            int thresholdCrossing = 0;
+            
+            // Get the channel data.
+            EcalChannelConstants channelData = findChannel(cellID);
+            
+            if (window != null) {
+                for (int i = 0; i < readoutWindow; i++) {
+                    if (numSamplesToRead != 0) {
+                        if (debug) {
+                            System.out.format("readout %d, %d: %d\n", cellID, numSamplesBefore + numSamplesAfter - numSamplesToRead, window[i - pointerOffset]);
+                        }
+                        adcSum += window[i - pointerOffset];
+                        numSamplesToRead--;
+                        if (numSamplesToRead == 0) {
+                            hits.add(new BaseRawCalorimeterHit(cellID, adcSum, 64 * thresholdCrossing));
+                        }
+                    } else if ((i == 0 || window[i - 1] <= channelData.getCalibration().getPedestal() + readoutThreshold) && window[i] > channelData.getCalibration().getPedestal() + readoutThreshold) {
+                        thresholdCrossing = i;
+                        pointerOffset = Math.min(numSamplesBefore, i);
+                        numSamplesToRead = pointerOffset + Math.min(numSamplesAfter, readoutWindow - i - pointerOffset - 1);
+                        adcSum = 0;
+                    }
+                }
+            }
+        }
+        return hits;
+    }
+
+    @Override
+    protected void putHits(List<CalorimeterHit> hits) {
+        //fill the readout buffers
+        for (CalorimeterHit hit : hits) {
+            RingBuffer eDepBuffer = signalMap.get(hit.getCellID());
+            double energyAmplitude = hit.getRawEnergy();
+            // Get the channel data.
+            EcalChannelConstants channelData = findChannel(hit.getCellID());
+            if (addNoise) {
+                //add preamp noise and photoelectron Poisson noise in quadrature
+                double noise;
+                if (use2014Gain) {
+                    noise = Math.sqrt(Math.pow(channelData.getCalibration().getNoise() * channelData.getGain().getGain() * ECalUtils.gainFactor * ECalUtils.ecalReadoutPeriod, 2) + hit.getRawEnergy() / (ECalUtils.lightYield * ECalUtils.quantumEff * ECalUtils.surfRatio));
+                } else {
+                    noise = Math.sqrt(Math.pow(channelData.getCalibration().getNoise() * channelData.getGain().getGain() * ECalUtils.MeV, 2) + hit.getRawEnergy() * ECalUtils.MeV / pePerMeV);
+                }
+                energyAmplitude += RandomGaussian.getGaussian(0, noise);
+            }
+            for (int i = 0; i < bufferLength; i++) {
+                eDepBuffer.addToCell(i, energyAmplitude * pulseAmplitude((i + 1) * readoutPeriod + readoutTime() - (ClockSingleton.getTime() + hit.getTime()), hit.getCellID()));
+            }
+        }
+    }
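
Editor's note: the noise added above combines electronics (pedestal) noise and photoelectron counting statistics in quadrature. In the test-run branch (use2014Gain = false) the smearing width applied to the hit energy is

    sigma^2 = (pedestalNoise * gain * MeV)^2 + rawEnergy * MeV / pePerMeV

where the first term converts the per-channel noise constant into energy units via the channel gain and the second is the Poisson variance for pePerMeV photoelectrons per MeV. The smeared energy is then spread across the ring buffer as E * A(t), with A the unit-integral pulse shape evaluated below.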
+
+    @Override
+    protected void initReadout() {
+        //initialize buffers
+        sumMap = new HashMap<Long, Integer>();
+        timeMap = new HashMap<Long, Integer>();
+        outputQueue = new PriorityQueue<HPSRawCalorimeterHit>(20, new HPSRawCalorimeterHit.TimeComparator());
+        resetFADCBuffers();
+    }
+
+    @Override
+    public void detectorChanged(Detector detector) {
+        // Get the Subdetector.
+        ecal = detector.getSubdetector(ecalName);
+        
+        // ECAL combined conditions object.
+        ecalConditions = ConditionsManager.defaultInstance()
+                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
+        
+        // List of channels.
+        channels = ecalConditions.getChannelCollection();
+        
+        // ID helper.
+        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
+        
+        resetFADCBuffers();
+    }
+
+    private boolean resetFADCBuffers() {
+        if (ecal == null) {
+            return false;
+        }
+        signalMap = new HashMap<Long, RingBuffer>();
+        pipelineMap = new HashMap<Long, FADCPipeline>();
+        Set<Long> cells = ((HPSEcal3) ecal).getNeighborMap().keySet();
+        for (Long cellID : cells) {
+        	EcalChannelConstants channelData = findChannel(cellID);
+            signalMap.put(cellID, new RingBuffer(bufferLength));
+            pipelineMap.put(cellID, new FADCPipeline(pipelineLength, (int) Math.round(channelData.getCalibration().getPedestal())));
+        }
+        return true;
+    }
+
+    private double pulseAmplitude(double time, long cellID) {
+    	
+    	EcalChannelConstants channelData = findChannel(cellID);
+    	
+        if (use2014Gain) {
+            //if fixedGain is set, multiply the default gain by this factor
+            double corrGain;
+            if (fixedGain > 0) {
+                corrGain = fixedGain;
+            } else {
+                corrGain = 1.0 / channelData.getGain().getGain();
+            }
+
+            return corrGain * readoutGain * pulseAmplitude(time, pulseShape, tp);
+        } else {
+            //normalization constant from cal gain (MeV/integral bit) to amplitude gain (amplitude bit/GeV)
+            double gain;
+            if (fixedGain > 0) {
+                gain = readoutPeriod / (fixedGain * ECalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt));
+            } else {
+                gain = readoutPeriod / (channelData.getGain().getGain() * ECalUtils.MeV * ((Math.pow(2, nBit) - 1) / maxVolt));
+            }
+
+            return gain * pulseAmplitude(time, pulseShape, tp);
+        }
+    }
+
+    /**
+     * Returns pulse amplitude at the given time (relative to hit time).
+     * Amplitude is normalized so the pulse integral is 1.
+     *
+     * @param time - Time relative to the hit time, in ns.
+     * @param shape - The pulse shape function to evaluate.
+     * @param shapingTime - The shaper time constant (tp), in ns.
+     * @return Returns the normalized pulse amplitude at the given time.
+     */
+    public static double pulseAmplitude(double time, PulseShape shape, double shapingTime) {
+        if (time <= 0.0) {
+            return 0.0;
+        }
+        switch (shape) {
+            case CRRC:
+                //peak at tp
+                //peak value 1/(tp*e)
+                return ((time / (shapingTime * shapingTime)) * Math.exp(-time / shapingTime));
+            case DoubleGaussian:
+                //According to measurements the output signal can be fitted by two gaussians, one for the rise of the signal, one for the fall
+                //peak at 3*riseTime
+                //peak value 1/norm
+
+                double norm = ((riseTime + fallTime) / 2) * Math.sqrt(2 * Math.PI); //to ensure the total integral is equal to 1: = 33.8
+                return funcGaus(time - 3 * riseTime, (time < 3 * riseTime) ? riseTime : fallTime) / norm;
+            case ThreePole:
+                //peak at 2*tp
+                //peak value 2/(tp*e^2)
+                return ((time * time / (2 * shapingTime * shapingTime * shapingTime)) * Math.exp(-time / shapingTime));
+            default:
+                return 0.0;
+        }
+    }
+
+    // Gaussian function needed for the calculation of the pulse shape amplitude  
+    public static double funcGaus(double t, double sig) {
+        return Math.exp(-t * t / (2 * sig * sig));
+    }
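
Editor's note: written out, the three unit-integral shapes implemented by pulseAmplitude are (for t > 0, with t_p the shaping time and sigma_r / sigma_f the rise and fall constants of the double Gaussian):

    CRRC:           A(t) = (t / t_p^2) * exp(-t / t_p)                  peak 1/(t_p * e) at t = t_p
    ThreePole:      A(t) = (t^2 / (2 t_p^3)) * exp(-t / t_p)            peak 2/(t_p * e^2) at t = 2 t_p
    DoubleGaussian: A(t) = exp(-(t - 3 sigma_r)^2 / (2 sigma^2)) / norm, with sigma = sigma_r for t < 3 sigma_r and sigma_f otherwise,
                    norm = ((sigma_r + sigma_f) / 2) * sqrt(2 pi)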
+
+    public class FADCPipeline {
+
+        private int[] array;
+        private int size;
+        private int ptr;
+
+        public FADCPipeline(int size) {
+            this.size = size;
+            array = new int[size]; //initialized to 0
+            ptr = 0;
+        }
+
+        //construct pipeline with a nonzero initial value
+        public FADCPipeline(int size, int init) {
+            this.size = size;
+            array = new int[size];
+            for (int i = 0; i < size; i++) {
+                array[i] = init;
+            }
+            ptr = 0;
+        }
+
+        /**
+         * Write value to current cell
+         */
+        public void writeValue(int val) {
+            array[ptr] = val;
+        }
+
+        /**
+         * Advance the pipeline pointer to the next cell, wrapping around at the end of the buffer
+         */
+        public void step() {
+            ptr++;
+            if (ptr == size) {
+                ptr = 0;
+            }
+        }
+
+        //return content of specified cell (pos=0 for current cell)
+        public int getValue(int pos) {
+            if (pos >= size || pos < 0) {
+                throw new ArrayIndexOutOfBoundsException();
+            }
+            return array[((ptr - pos) % size + size) % size];
+        }
+    }
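
Editor's note: getValue(pos) walks backwards from the write pointer using a wrap-safe double modulus. A quick check with hypothetical values (not part of the commit): size = 4, ptr = 1, pos = 2 gives

    ((1 - 2) % 4 + 4) % 4 = 3

so the read lands on the cell that was current two step() calls earlier.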
+    
+    // Convert physical ID to gain value.
+    private EcalChannelConstants findChannel(long cellID) {
+        // Make an ID object from raw hit ID.
+        IIdentifier id = new Identifier(cellID);
+        
+        // Get physical field values.
+        int system = helper.getValue(id, "system");
+        int x = helper.getValue(id, "ix");
+        int y = helper.getValue(id, "iy");
+        
+        // Create an ID to search for in channel collection.
+        GeometryId geometryId = new GeometryId(helper, new int[] { system, x, y });
+        
+        // Find the ECAL channel.
+//        return channels.findChannel(geometryId);
+                
+        // Get the channel data.
+        return ecalConditions.getChannelConstants(channels.findChannel(geometryId));
+    }
+    
+}
+

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
FADCPrimaryTriggerDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCPrimaryTriggerDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCPrimaryTriggerDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,889 @@
+package org.hps.readout.ecal;
+
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+
+import org.hps.recon.ecal.ECalUtils;
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.aida.AIDA;
+
+/**
+ * Class <code>FADCPrimaryTriggerDriver</code> reads reconstructed
+ * clusters and makes trigger decisions on them. It is designed to
+ * trigger off 2.2 GeV beam A' events. Cuts can either be set manually
+ * in a steering file or automatically by specifying a background level.
+ * The code for generating trigger pairs and handling the coincidence
+ * window comes from <code>FADCTriggerDriver</code>.
+ * 
+ * @author Kyle McCarty
+ * @see FADCTriggerDriver
+ */
+public class FADCPrimaryTriggerDriver extends TriggerDriver {
+    // ==================================================================
+    // ==== Trigger Cut Default Parameters ==============================
+    // ==================================================================
+    private int minHitCount = 1;								// Minimum required cluster hit count threshold. (Hits)			
+    private double seedEnergyHigh = Double.MAX_VALUE;			// Maximum allowed cluster seed energy. (GeV)
+    private double seedEnergyLow = Double.MIN_VALUE;			// Minimum required cluster seed energy. (GeV)
+    private double clusterEnergyHigh = 1.5 * ECalUtils.GeV;		// Maximum allowed cluster total energy. (GeV)
+    private double clusterEnergyLow = .1 * ECalUtils.GeV;		// Minimum required cluster total energy. (GeV)
+    private double energySumHigh = 1.9 * ECalUtils.GeV;			// Maximum allowed pair energy sum. (GeV)
+    private double energySumLow = 0.0 * ECalUtils.GeV;			// Minimum required pair energy sum. (GeV)
+    private double energyDifferenceHigh = 2.2 * ECalUtils.GeV;	// Maximum allowed pair energy difference. (GeV)
+    private double energySlopeLow = 1.1;						// Minimum required pair energy slope value.
+    private double coplanarityHigh = 35;						// Maximum allowed pair coplanarity deviation. (Degrees)
+    
+    // ==================================================================
+    // ==== Trigger General Default Parameters ==========================
+    // ==================================================================
+    private String clusterCollectionName = "EcalClusters";		// Name for the LCIO cluster collection.
+    private int pairCoincidence = 2;							// Maximum allowed time difference between clusters. (4 ns clock-cycles)
+    private double energySlopeParamF = 0.005500;				// A parameter value used for the energy slope calculation.
+    private double originX = 1393.0 * Math.tan(0.03052);		// ECal mid-plane, defined by photon beam position (30.52 mrad) at ECal face (z=1393 mm)
+    private int backgroundLevel = -1;							// Automatically sets the cuts to achieve a predetermined background rate.
+    
+    // ==================================================================
+    // ==== Driver Internal Variables ===================================
+    // ==================================================================
+    private Queue<List<HPSEcalCluster>> topClusterQueue = null;	// Store clusters on the top half of the calorimeter.
+    private Queue<List<HPSEcalCluster>> botClusterQueue = null;	// Store clusters on the bottom half of the calorimeter.
+    private int allClusters = 0;								// Track the number of clusters processed.
+    private int allPairs = 0;									// Track the number of cluster pairs processed.
+    private int clusterTotalEnergyCount = 0;					// Track the clusters which pass the total energy cut.
+    private int clusterSeedEnergyCount = 0;						// Track the clusters which pass the seed energy cut.
+    private int clusterHitCountCount = 0;						// Track the clusters which pass the hit count cut.
+    private int pairEnergySumCount = 0;							// Track the pairs which pass the energy sum cut.
+    private int pairEnergyDifferenceCount = 0;					// Track the pairs which pass the energy difference cut.
+    private int pairEnergySlopeCount = 0;						// Track the pairs which pass the energy slope cut.
+    private int pairCoplanarityCount = 0;						// Track the pairs which pass the coplanarity cut.
+    
+    // ==================================================================
+    // ==== Trigger Distribution Plots ==================================
+    // ==================================================================
+	private AIDA aida = AIDA.defaultInstance();
+    IHistogram1D clusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution", 176, 0.0, 2.2);
+    IHistogram1D clusterSeedEnergy100 = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Over 100 MeV)", 176, 0.0, 2.2);
+    IHistogram1D clusterSeedEnergySingle = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
+    IHistogram1D clusterSeedEnergyAll = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+    IHistogram1D clusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution", 9, 1, 10);
+    IHistogram1D clusterHitCount100 = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Over 100 MeV)", 9, 1, 10);
+    IHistogram1D clusterHitCountSingle = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed Single Cuts)", 9, 1, 10);
+    IHistogram1D clusterHitCountAll = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed All Cuts)", 9, 1, 10);
+    IHistogram1D clusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution", 176, 0.0, 2.2);
+    IHistogram1D clusterTotalEnergy100 = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Over 100 MeV)", 176, 0.0, 2.2);
+    IHistogram1D clusterTotalEnergySingle = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
+    IHistogram1D clusterTotalEnergyAll = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+    
+    IHistogram1D pairEnergySum = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution", 176, 0.0, 4.4);
+    IHistogram1D pairEnergySumAll = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution (Passed All Cuts)", 176, 0.0, 4.4);    
+    IHistogram1D pairEnergyDifference = aida.histogram1D("Trigger Plots :: Pair Energy Difference Distribution", 176, 0.0, 2.2);
+    IHistogram1D pairEnergyDifferenceAll = aida.histogram1D("Trigger Plots :: Pair Energy Difference Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+    IHistogram1D pairCoplanarity = aida.histogram1D("Trigger Plots :: Pair Coplanarity Distribution", 360, 0.0, 180.0);
+    IHistogram1D pairCoplanarityAll = aida.histogram1D("Trigger Plots :: Pair Coplanarity Distribution (Passed All Cuts)", 360, 0.0, 180.0);
+    IHistogram1D pairEnergySlope = aida.histogram1D("Trigger Plots :: Pair Energy Slope Distribution", 400, 0.0, 4.0);
+    IHistogram1D pairEnergySlopeAll = aida.histogram1D("Trigger Plots :: Pair Energy Slope Distribution (Passed All Cuts)", 400, 0.0, 4.0);
+    
+	IHistogram2D clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 44, -22.0, 22.0, 10, -5, 5);
+	IHistogram2D clusterDistribution100 = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Over 100 MeV)", 44, -23, 23, 11, -5.5, 5.5);
+	IHistogram2D clusterDistributionSingle = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+	IHistogram2D clusterDistributionAll = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+    
+    /**
+     * Prints out the results of the trigger at the end of the run.
+     */
+    @Override
+    public void endOfData() {
+    	// Print out the results of the trigger cuts.
+    	System.out.printf("Trigger Processing Results%n");
+    	System.out.printf("\tSingle-Cluster Cuts%n");
+    	System.out.printf("\t\tTotal Clusters Processed     :: %d%n", allClusters);
+    	System.out.printf("\t\tPassed Seed Energy Cut       :: %d%n", clusterSeedEnergyCount);
+    	System.out.printf("\t\tPassed Hit Count Cut         :: %d%n", clusterHitCountCount);
+    	System.out.printf("\t\tPassed Total Energy Cut      :: %d%n", clusterTotalEnergyCount);
+    	System.out.printf("%n");
+    	System.out.printf("\tCluster Pair Cuts%n");
+    	System.out.printf("\t\tTotal Pairs Processed        :: %d%n", allPairs);
+    	System.out.printf("\t\tPassed Energy Sum Cut        :: %d%n", pairEnergySumCount);
+    	System.out.printf("\t\tPassed Energy Difference Cut :: %d%n", pairEnergyDifferenceCount);
+    	System.out.printf("\t\tPassed Energy Slope Cut      :: %d%n", pairEnergySlopeCount);
+    	System.out.printf("\t\tPassed Coplanarity Cut       :: %d%n", pairCoplanarityCount);
+    	System.out.printf("%n");
+    	System.out.printf("\tTrigger Count :: %d%n", numTriggers);
+    	
+    	// Run the superclass method.
+        super.endOfData();
+    }
+    
+    /**
+     * Performs single cluster cuts for the event and passes any clusters
+     * which survive to be formed into cluster pairs for the trigger.
+     */
+    @Override
+    public void process(EventHeader event) {
+    	// Process the list of clusters for the event, if it exists.
+        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+        	// Get the collection of clusters.
+        	List<HPSEcalCluster> clusterList = event.get(HPSEcalCluster.class, clusterCollectionName);
+        	
+        	// Create a list to hold clusters which pass the single
+        	// cluster cuts.
+        	List<HPSEcalCluster> goodClusterList = new ArrayList<HPSEcalCluster>(clusterList.size());
+        	
+        	// Sort through the cluster list and add clusters that pass
+        	// the single cluster cuts to the good list.
+        	clusterLoop:
+        	for(HPSEcalCluster cluster : clusterList) {
+        		// Increment the number of processed clusters.
+        		allClusters++;
+        		
+        		// Get the cluster plot values.
+        		int hitCount = cluster.getCalorimeterHits().size();
+        		double seedEnergy = cluster.getSeedHit().getCorrectedEnergy();
+        		double clusterEnergy = cluster.getEnergy();
+        		int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
+        		int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
+        		if(ix > 0) { ix = ix - 1; }
+        		
+        		// Fill the general plots.
+        		clusterSeedEnergy.fill(seedEnergy, 1);
+        		clusterTotalEnergy.fill(clusterEnergy, 1);
+        		clusterHitCount.fill(hitCount, 1);
+        		clusterDistribution.fill(ix, iy, 1);
+        		
+        		// Fill the "over 100 MeV" plots if applicable.
+        		if(seedEnergy >= 0.100) {
+            		clusterSeedEnergy100.fill(seedEnergy, 1);
+            		clusterTotalEnergy100.fill(clusterEnergy, 1);
+            		clusterHitCount100.fill(hitCount, 1);
+            		clusterDistribution100.fill(ix, iy, 1);
+        		}
+        		
+        		// ==== Seed Hit Energy Cut ====================================
+        		// =============================================================
+        		// If the cluster fails the cut, skip to the next cluster.
+        		if(!clusterSeedEnergyCut(cluster)) { continue clusterLoop; }
+        		
+        		// Otherwise, note that it passed the cut.
+        		clusterSeedEnergyCount++;
+        		
+        		// ==== Cluster Hit Count Cut ==================================
+        		// =============================================================
+        		// If the cluster fails the cut, skip to the next cluster.
+        		if(!clusterHitCountCut(cluster)) { continue clusterLoop; }
+        		
+        		// Otherwise, note that it passed the cut.
+        		clusterHitCountCount++;
+        		
+        		// ==== Cluster Total Energy Cut ===============================
+        		// =============================================================
+        		// If the cluster fails the cut, skip to the next cluster.
+        		if(!clusterTotalEnergyCut(cluster)) { continue clusterLoop; }
+        		
+        		// Otherwise, note that it passed the cut.
+        		clusterTotalEnergyCount++;
+        		
+        		// Fill the "passed single cuts" plots.
+        		clusterSeedEnergySingle.fill(seedEnergy, 1);
+        		clusterTotalEnergySingle.fill(clusterEnergy, 1);
+        		clusterHitCountSingle.fill(hitCount, 1);
+        		clusterDistributionSingle.fill(ix, iy, 1);
+        		
+        		// A cluster that passes all of the single-cluster cuts
+        		// can be used in cluster pairs.
+        		goodClusterList.add(cluster);
+        	}
+        	
+        	// Put the good clusters into the cluster queue.
+        	updateClusterQueues(goodClusterList);
+        }
+        
+        // Perform the superclass event processing.
+        super.process(event);
+    }
+    
+    /**
+     * Sets the trigger cuts automatically to a given background level.
+     * 
+     * @param backgroundLevel - The level to which the background should
+     * be set. Actual background rates equal about (5 * backgroundLevel) kHz.
+     */
+    public void setBackgroundLevel(int backgroundLevel) {
+    	this.backgroundLevel = backgroundLevel;
+    }
+    
+    /**
+     * Sets the name of the LCIO collection that contains the clusters.
+     * 
+     * @param clusterCollectionName - The cluster LCIO collection name.
+     */
+    public void setClusterCollectionName(String clusterCollectionName) {
+        this.clusterCollectionName = clusterCollectionName;
+    }
+    
+    /**
+     * Sets the highest allowed energy a cluster may have and still
+     * pass the cluster total energy single cluster cut. Value uses
+     * units of GeV.
+     *
+     * @param clusterEnergyHigh - The parameter value.
+     */
+    public void setClusterEnergyHigh(double clusterEnergyHigh) {
+        this.clusterEnergyHigh = clusterEnergyHigh * ECalUtils.GeV;
+    }
+    
+    /**
+     * Sets the lowest allowed energy a cluster may have and still
+     * pass the cluster total energy single cluster cut. Value uses
+     * units of GeV.
+     *
+     * @param clusterEnergyLow - The parameter value.
+     */
+    public void setClusterEnergyLow(double clusterEnergyLow) {
+        this.clusterEnergyLow = clusterEnergyLow * ECalUtils.GeV;
+    }
+    
+    /**
+     * Sets the maximum deviation from coplanarity that a cluster pair
+     * may possess and still pass the coplanarity pair cut. Value uses
+     * units of degrees.
+     *
+     * @param coplanarityHigh - The parameter value.
+     */
+    public void setCoplanarityHigh(double coplanarityHigh) {
+        this.coplanarityHigh = coplanarityHigh;
+    }
+    
+    /**
+     * Sets the highest allowed energy difference a cluster pair may
+     * have and still pass the cluster pair energy difference cut.
+     * Value uses units of GeV.
+     *
+     * @param energyDifferenceHigh - The parameter value.
+     */
+    public void setEnergyDifferenceHigh(double energyDifferenceHigh) {
+        this.energyDifferenceHigh = energyDifferenceHigh * ECalUtils.GeV;
+    }
+    
+    /**
+     * Sets the lowest allowed energy slope a cluster pair may
+     * have and still pass the cluster pair energy slope cut.
+     *
+     * @param energySlopeLow - The parameter value.
+     */
+    public void setEnergySlopeLow(double energySlopeLow) {
+    	this.energySlopeLow = energySlopeLow;
+    }
+    
+    /**
+     * Sets the highest allowed energy a cluster pair may have and
+     * still pass the cluster pair energy sum cluster cut. Value uses
+     * units of GeV.
+     *
+     * @param energySumHigh - The parameter value.
+     */
+    public void setEnergySumHigh(double energySumHigh) {
+        this.energySumHigh = energySumHigh * ECalUtils.GeV;
+    }
+    
+    /**
+     * Sets the lowest allowed energy a cluster pair may have and
+     * still pass the cluster pair energy sum cluster cut. Value uses
+     * units of GeV.
+     *
+     * @param energySumLow - The parameter value.
+     */
+    public void setEnergySumLow(double energySumLow) {
+        this.energySumLow = energySumLow * ECalUtils.GeV;
+    }
+    
+    /**
+     * Sets the minimum number of hits needed for a cluster to pass
+     * the hit count single cluster cut.
+     *
+     * @param minHitCount - The parameter value.
+     */
+    public void setMinHitCount(int minHitCount) {
+        this.minHitCount = minHitCount;
+    }
+    
+    /**
+     * Sets the X coordinate used as the origin for cluster coplanarity and
+     * slope calculations. This defaults to the calorimeter mid-plane
+     * and is in units of millimeters.
+     *
+     * @param originX - The parameter value.
+     */
+    public void setOriginX(double originX) {
+        this.originX = originX;
+    }
+    
+    /**
+     * Sets the time range over which cluster pairs will be formed.
+     * Value uses units of clock-cycles. Note that the number of
+     * clock-cycles used is calculated as (2 * pairCoincidence) + 1.
+     * 
+     * @param pairCoincidence - The parameter value.
+     */
+    public void setPairCoincidence(int pairCoincidence) {
+        this.pairCoincidence = pairCoincidence;
+    }
+    
+    /**
+     * Sets the highest allowed energy a seed hit may have and still
+     * pass the seed hit energy single cluster cut. Value uses units
+     * of GeV.
+     *
+     * @param seedEnergyHigh - The parameter value.
+     */
+    public void setSeedEnergyHigh(double seedEnergyHigh) {
+        this.seedEnergyHigh = seedEnergyHigh * ECalUtils.GeV;
+    }
+    
+    /**
+     * Sets the lowest allowed energy a seed hit may have and still
+     * pass the seed hit energy single cluster cut. Value uses units
+     * of GeV.
+     *
+     * @param seedEnergyLow - The parameter value.
+     */
+    public void setSeedEnergyLow(double seedEnergyLow) {
+        this.seedEnergyLow = seedEnergyLow * ECalUtils.GeV;
+    }
+    
+    /**
+     * Initializes the cluster pair queues and other variables.
+     */
+    @Override
+    public void startOfData() {
+    	// Make sure that a valid cluster collection name has been
+    	// defined. If it has not, throw an exception.
+        if (clusterCollectionName == null) {
+            throw new RuntimeException("The parameter clusterCollectionName was not set!");
+        }
+    	
+        // Initialize the top and bottom cluster queues.
+        topClusterQueue = new LinkedList<List<HPSEcalCluster>>();
+        botClusterQueue = new LinkedList<List<HPSEcalCluster>>();
+        
+        // Populate the top cluster queue. It should be populated with
+        // a number of empty lists equal to (2 * pairCoincidence + 1).
+        for (int i = 0; i < 2 * pairCoincidence + 1; i++) {
+            topClusterQueue.add(new ArrayList<HPSEcalCluster>());
+        }
+        
+        // Populate the bottom cluster queue. It should be populated with
+        // a number of empty lists equal to (pairCoincidence + 1).
+        for (int i = 0; i < pairCoincidence + 1; i++) {
+            botClusterQueue.add(new ArrayList<HPSEcalCluster>());
+        }
+        
+        // If a background level has been set, pick the correct cuts.
+        if(backgroundLevel != -1) { setBackgroundCuts(backgroundLevel); }
+        
+        // Run the superclass method.
+        super.startOfData();
+    }
+
+    /**
+     * Gets a list of all unique top/bottom cluster pairs that fall
+     * within the pair coincidence window.
+     *
+     * @return Returns a list of cluster pairs, with the higher-energy
+     * cluster first in each pair.
+     */
+    protected List<HPSEcalCluster[]> getClusterPairsTopBot() {
+        // Create a list to store cluster pairs. 
+        List<HPSEcalCluster[]> clusterPairs = new ArrayList<HPSEcalCluster[]>();
+        
+        // Loop over all top-bottom pairs of clusters; higher-energy cluster goes first in the pair
+        // To apply pair coincidence time, use only bottom clusters from the 
+        // readout cycle pairCoincidence readout cycles ago, and top clusters 
+        // from all 2*pairCoincidence+1 previous readout cycles
+        for (HPSEcalCluster botCluster : botClusterQueue.element()) {
+            for (List<HPSEcalCluster> topClusters : topClusterQueue) {
+                for (HPSEcalCluster topCluster : topClusters) {
+                	// The first cluster in a pair should always be
+                	// the higher energy cluster. If the top cluster
+                	// is higher energy, it goes first.
+                    if (topCluster.getEnergy() > botCluster.getEnergy()) {
+                        HPSEcalCluster[] clusterPair = {topCluster, botCluster};
+                        clusterPairs.add(clusterPair);
+                    }
+                    
+                    // Otherwise, the bottom cluster goes first.
+                    else {
+                        HPSEcalCluster[] clusterPair = {botCluster, topCluster};
+                        clusterPairs.add(clusterPair);
+                    }
+                }
+            }
+        }
+        
+        // Return the cluster pair lists.
+        return clusterPairs;
+    }
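
Editor's note: a small illustration of the queue geometry this pairing relies on, using the default pairCoincidence = 2 (values here are illustrative only):

    bottom queue (pairCoincidence + 1 = 3 lists):      [t-2] [t-1] [t]                  element() -> the t-2 list
    top queue    (2 * pairCoincidence + 1 = 5 lists):  [t-4] [t-3] [t-2] [t-1] [t]

Pairing the bottom head against every top list allows top-bottom time differences from -2 to +2 readout cycles, i.e. a symmetric coincidence window of +/- pairCoincidence cycles.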
+    
+	/**
+	 * Determines if the event produces a trigger.
+	 * 
+	 * @return Returns <code>true</code> if the event produces a trigger
+	 * and <code>false</code> if it does not.
+	 */
+	@Override
+	protected boolean triggerDecision(EventHeader event) {
+    	// If there is a list of clusters present for this event,
+    	// check whether it passes the trigger conditions.
+    	if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+        	return testTrigger();
+        }
+        
+        // Otherwise, this event cannot produce a trigger, so return
+        // false automatically.
+        else { return false; }
+	}
+    
+    /**
+     * Checks whether the argument cluster possesses the minimum
+     * allowed hits.
+     * 
+     * @param cluster - The cluster to check.
+     * @return Returns <code>true</code> if the cluster passes the cut
+     * and <code>false</code> if the cluster does not.
+     */
+    private boolean clusterHitCountCut(HPSEcalCluster cluster) {
+    	return (getValueClusterHitCount(cluster) >= minHitCount);
+    }
+    
+    /**
+     * Checks whether the argument cluster seed hit falls within the
+     * allowed seed hit energy range.
+     * 
+     * @param cluster - The cluster to check.
+     * @return Returns <code>true</code> if the cluster passes the cut
+     * and <code>false</code> if the cluster does not.
+     */
+    private boolean clusterSeedEnergyCut(HPSEcalCluster cluster) {
+    	// Get the cluster seed energy.
+    	double energy = getValueClusterSeedEnergy(cluster);
+    	
+    	// Check that it is above the minimum threshold and below the
+    	// maximum threshold.
+    	return (energy < seedEnergyHigh) && (energy > seedEnergyLow);
+    }
+    
+    /**
+     * Checks whether the argument cluster falls within the allowed
+     * cluster total energy range.
+     * 
+     * @param cluster - The cluster to check.
+     * @return Returns <code>true</code> if the cluster passes the cut
+     * and <code>false</code> if the cluster does not.
+     */
+    private boolean clusterTotalEnergyCut(HPSEcalCluster cluster) {
+    	// Get the total cluster energy.
+    	double energy = getValueClusterTotalEnergy(cluster);
+    	
+    	// Check that it is above the minimum threshold and below the
+    	// maximum threshold.
+    	return (energy < clusterEnergyHigh) && (energy > clusterEnergyLow);
+    }
+    
+    /**
+     * Calculates the distance between a cluster's seed hit and the
+     * mid-plane origin (originX, 0).
+     * 
+     * @param cluster - The cluster from which the value should be
+     * calculated.
+     * @return Returns the distance from the cluster seed to the origin.
+     */
+    private double getClusterDistance(HPSEcalCluster cluster) {
+        return Math.hypot(cluster.getSeedHit().getPosition()[0] - originX, cluster.getSeedHit().getPosition()[1]);
+    }
+    
+    /**
+     * Gets the value used for the cluster total energy cut.
+     * 
+     * @param cluster - The cluster from which the value should be
+     * derived.
+     * @return Returns the cut value.
+     */
+    private double getValueClusterTotalEnergy(HPSEcalCluster cluster) {
+    	return cluster.getEnergy();
+    }
+    
+    /**
+     * Gets the value used for the cluster hit count cut.
+     * 
+     * @param cluster - The cluster from which the value should be
+     * derived.
+     * @return Returns the cut value.
+     */
+    private int getValueClusterHitCount(HPSEcalCluster cluster) {
+    	return cluster.getCalorimeterHits().size();
+    }
+    
+    /**
+     * Gets the value used for the seed hit energy cut.
+     * 
+     * @param cluster - The cluster from which the value should be
+     * derived.
+     * @return Returns the cut value.
+     */
+    private double getValueClusterSeedEnergy(HPSEcalCluster cluster) {
+    	return cluster.getSeedHit().getCorrectedEnergy();
+    }
+    
+    /**
+     * Calculates the value used by the coplanarity cut.
+     * 
+     * @param clusterPair - The cluster pair from which the value should
+     * be calculated.
+     * @return Returns the cut value.
+     */
+    private double getValueCoplanarity(HPSEcalCluster[] clusterPair) {
+    	// Get the cluster angles.
+    	double[] clusterAngle = new double[2];
+    	for(int i = 0; i < 2; i++) {
+            double position[] = clusterPair[i].getSeedHit().getPosition();
+            clusterAngle[i] = (Math.toDegrees(Math.atan2(position[1], position[0] - originX)) + 180.0) % 180.0;
+    	}
+    	
+    	// Calculate the coplanarity cut value.
+        return Math.abs(clusterAngle[1] - clusterAngle[0]);
+    }
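
Editor's note: in formula form, the value computed above is

    theta_i = ( atan2(y_i, x_i - originX) [degrees] + 180 ) mod 180 ,    coplanarity = | theta_1 - theta_2 |

measured about the mid-plane origin (originX, 0); the pair cut then requires coplanarity < coplanarityHigh (35 degrees by default).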
+    
+    /**
+     * Calculates the value used by the energy difference cut.
+     * 
+     * @param clusterPair - The cluster pair from which the value should
+     * be calculated.
+     * @return Returns the cut value.
+     */
+    private double getValueEnergyDifference(HPSEcalCluster[] clusterPair) {
+    	return clusterPair[0].getEnergy() - clusterPair[1].getEnergy();
+    }
+    
+    /**
+     * Calculates the value used by the energy slope cut.
+     * 
+     * @param clusterPair - The cluster pair from which the value should
+     * be calculated.
+     * @return Returns the cut value.
+     */
+    private double getValueEnergySlope(HPSEcalCluster[] clusterPair) {
+    	// E + R*F
+    	// Get the low energy cluster energy.
+    	double slopeParamE = clusterPair[1].getEnergy();
+    	
+    	// Get the low energy cluster radial distance.
+    	double slopeParamR = getClusterDistance(clusterPair[1]);
+    	
+    	// Calculate the energy slope.
+    	return slopeParamE + slopeParamR * energySlopeParamF;
+    }
+    
+    /**
+     * Calculates the value used by the energy sum cut.
+     * 
+     * @param clusterPair - The cluster pair from which the value should
+     * be calculated.
+     * @return Returns the cut value.
+     */
+    private double getValueEnergySum(HPSEcalCluster[] clusterPair) {
+    	return clusterPair[0].getEnergy() + clusterPair[1].getEnergy();
+    }
+    
+    /**
+     * Checks if a cluster pair is coplanar to the beam within a given
+     * angle.
+     *
+     * @param clusterPair - The cluster pair to check.
+     * @return Returns <code>true</code> if the cluster pair passes
+     * the cut and <code>false</code> if it does not.
+     */
+    private boolean pairCoplanarityCut(HPSEcalCluster[] clusterPair) {
+        return (getValueCoplanarity(clusterPair) < coplanarityHigh);
+    }
+    
+    /**
+     * Checks if the energy difference between the clusters making up
+     * a cluster pair is below an energy difference threshold.
+     *
+     * @param clusterPair - The cluster pair to check.
+     * @return Returns <code>true</code> if the cluster pair passes
+     * the cut and <code>false</code> if it does not.
+     */
+    private boolean pairEnergyDifferenceCut(HPSEcalCluster[] clusterPair) {
+        return (getValueEnergyDifference(clusterPair) < energyDifferenceHigh);
+    }
+    
+    /**
+     * Requires that the energy and beam distance of the lower-energy
+     * cluster in a cluster pair satisfy
+     * E_low + d_b * F > [ Threshold ],
+     * where F is the energy slope parameter (0.0055 GeV/mm by default).
+     *
+     * @param clusterPair - The cluster pair to check.
+     * @return Returns <code>true</code> if the cluster pair passes
+     * the cut and <code>false</code> if it does not.
+     */
+    private boolean pairEnergySlopeCut(HPSEcalCluster[] clusterPair) {
+    	return (getValueEnergySlope(clusterPair) > energySlopeLow);
+    }
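
Editor's note: a worked example of the slope cut with hypothetical numbers. With the default F = 0.0055 GeV/mm, a lower-energy cluster of E_low = 0.5 GeV sitting R_low = 150 mm from the origin gives

    slope = 0.5 + 150 * 0.0055 = 1.325 GeV

which passes the default threshold energySlopeLow = 1.1, while the same cluster at R_low = 100 mm (slope = 1.05) would fail.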
+    
+    /**
+     * Checks if the sum of the energies of clusters making up a cluster
+     * pair is below an energy sum threshold.
+     *
+     * @param clusterPair - The cluster pair to check.
+     * @return Returns <code>true</code> if the cluster pair passes
+     * the cut and <code>false</code> if it does not.
+     */
+    private boolean pairEnergySumCut(HPSEcalCluster[] clusterPair) {
+    	// Get the energy sum value.
+    	double energySum = getValueEnergySum(clusterPair);
+    	
+    	// Check that it is within the allowed range.
+        return (energySum < energySumHigh) && (energySum > energySumLow);
+    }
+	
+    private void setBackgroundCuts(int backgroundLevel) {
+    	// Make sure that the background level is valid.
+    	if(backgroundLevel < 1 || backgroundLevel > 10) {
+    		throw new RuntimeException(String.format("Trigger cuts are undefined for background level %d.", backgroundLevel));
+    	}
+    	
+    	// Otherwise, set the trigger cuts. Certain cuts are constant
+    	// across all background levels.
+    	clusterEnergyLow = 0.000;
+    	seedEnergyLow = 0.100;
+    	
+    	// Set the variable values.
+    	if(backgroundLevel == 1) {
+    		clusterEnergyHigh = 1.700;
+    		seedEnergyHigh = 1.300;
+    		energySumLow = 0.400;
+    		energySumHigh = 2.00;
+    		energyDifferenceHigh = 1.500;
+    		energySlopeLow = 1.0;
+    		coplanarityHigh = 40;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 2) {
+    		clusterEnergyHigh = 1.600;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.300;
+    		energySumHigh = 2.00;
+    		energyDifferenceHigh = 1.400;
+    		energySlopeLow = 0.8;
+    		coplanarityHigh = 40;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 3) {
+    		clusterEnergyHigh = 1.600;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.200;
+    		energySumHigh = 2.000;
+    		energyDifferenceHigh = 1.400;
+    		energySlopeLow = 0.7;
+    		coplanarityHigh = 40;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 4) {
+    		clusterEnergyHigh = 1.500;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.500;
+    		energySumHigh = 1.950;
+    		energyDifferenceHigh = 1.400;
+    		energySlopeLow = 0.6;
+    		coplanarityHigh = 40;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 5) {
+    		clusterEnergyHigh = 1.500;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.400;
+    		energySumHigh = 2.000;
+    		energyDifferenceHigh = 1.400;
+    		energySlopeLow = 0.6;
+    		coplanarityHigh = 45;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 6) {
+    		clusterEnergyHigh = 1.500;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.200;
+    		energySumHigh = 1.950;
+    		energyDifferenceHigh = 1.400;
+    		energySlopeLow = 0.6;
+    		coplanarityHigh = 55;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 7) {
+    		clusterEnergyHigh = 1.700;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.200;
+    		energySumHigh = 2.000;
+    		energyDifferenceHigh = 1.500;
+    		energySlopeLow = 0.6;
+    		coplanarityHigh = 60;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 8) {
+    		clusterEnergyHigh = 1.700;
+    		seedEnergyHigh = 1.300;
+    		energySumLow = 0.200;
+    		energySumHigh = 2.000;
+    		energyDifferenceHigh = 1.500;
+    		energySlopeLow = 0.6;
+    		coplanarityHigh = 65;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 9) {
+    		clusterEnergyHigh = 1.500;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.400;
+    		energySumHigh = 1.950;
+    		energyDifferenceHigh = 1.400;
+    		energySlopeLow = 0.5;
+    		coplanarityHigh = 60;
+    		minHitCount = 2;
+    	} else if(backgroundLevel == 10) {
+    		clusterEnergyHigh = 1.500;
+    		seedEnergyHigh = 1.200;
+    		energySumLow = 0.400;
+    		energySumHigh = 2.000;
+    		energyDifferenceHigh = 1.400;
+    		energySlopeLow = 0.5;
+    		coplanarityHigh = 65;
+    		minHitCount = 2;
+    	}
+    }
+    
+	/**
+	 * Tests all of the current cluster pairs for triggers.
+	 * 
+	 * @return Returns <code>true</code> if one of the cluster pairs
+	 * passes all of the cluster cuts and <code>false</code> otherwise.
+	 */
+    private boolean testTrigger() {
+    	// Get the list of cluster pairs.
+    	List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();
+        
+        // Iterate over the cluster pairs and perform each of the cluster
+        // pair cuts on them. A cluster pair that passes all of the
+        // cuts registers as a trigger.
+    	pairLoop:
+        for (HPSEcalCluster[] clusterPair : clusterPairs) {
+    		// Increment the number of processed cluster pairs.
+    		allPairs++;
+    		
+    		// Get the plot values for the pair cuts.
+    		double energySum = getValueEnergySum(clusterPair);
+    		double energyDifference = getValueEnergyDifference(clusterPair);
+    		double energySlope = getValueEnergySlope(clusterPair);
+    		double coplanarity = getValueCoplanarity(clusterPair);
+    		
+    		// Fill the general plots.
+    		pairEnergySum.fill(energySum, 1);
+    		pairEnergyDifference.fill(energyDifference, 1);
+    		pairEnergySlope.fill(energySlope, 1);
+    		pairCoplanarity.fill(coplanarity, 1);
+    		
+    		// ==== Pair Energy Sum Cut ====================================
+    		// =============================================================
+    		// If the cluster fails the cut, skip to the next pair.
+    		if(!pairEnergySumCut(clusterPair)) { continue pairLoop; }
+    		
+    		// Otherwise, note that it passed the cut.
+    		pairEnergySumCount++;
+        	
+    		// ==== Pair Energy Difference Cut =============================
+    		// =============================================================
+    		// If the cluster fails the cut, skip to the next pair.
+    		if(!pairEnergyDifferenceCut(clusterPair)) { continue pairLoop; }
+    		
+    		// Otherwise, note that it passed the cut.
+    		pairEnergyDifferenceCount++;
+    		
+    		// ==== Pair Energy Slope Cut ==================================
+    		// =============================================================
+    		// If the cluster fails the cut, skip to the next pair.
+    		//if(!energyDistanceCut(clusterPair)) { continue pairLoop; }
+    		if(!pairEnergySlopeCut(clusterPair)) { continue pairLoop; }
+    		
+    		// Otherwise, note that it passed the cut.
+    		pairEnergySlopeCount++;
+    		
+    		// ==== Pair Coplanarity Cut ===================================
+    		// =============================================================
+    		// If the cluster fails the cut, skip to the next pair.
+    		if(!pairCoplanarityCut(clusterPair)) { continue pairLoop; }
+    		
+    		// Otherwise, note that it passed the cut.
+    		pairCoplanarityCount++;
+    		
+    		// Get the cluster plot values.
+    		int[] hitCount = new int[2];
+    		double[] seedEnergy = new double[2];
+    		double[] clusterEnergy = new double[2];
+    		int[] ix = new int[2];
+    		int[] iy = new int[2];
+    		for(int i = 0; i < 2; i++) {
+    			hitCount[i] = clusterPair[i].getCalorimeterHits().size();
+    			seedEnergy[i] = clusterPair[i].getSeedHit().getCorrectedEnergy();
+    			clusterEnergy[i] = clusterPair[i].getEnergy();
+    			ix[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("ix");
+    			iy[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("iy");
+    			if(ix[i] > 0) { ix[i] = ix[i] - 1; }
+    		}
+    		
+    		// Fill the general plots.
+    		for(int i = 0; i < 2; i++) {
+	    		clusterSeedEnergyAll.fill(seedEnergy[i], 1);
+	    		clusterTotalEnergyAll.fill(clusterEnergy[i], 1);
+	    		clusterHitCountAll.fill(hitCount[i], 1);
+	    		clusterDistributionAll.fill(ix[i], iy[i], 1);
+    		}
+    		
+    		// Fill the "passed all cuts" plots.
+    		pairEnergySumAll.fill(energySum, 1);
+    		pairEnergyDifferenceAll.fill(energyDifference, 1);
+    		pairEnergySlopeAll.fill(energySlope, 1);
+    		pairCoplanarityAll.fill(coplanarity, 1);
+    		
+    		// A cluster pair that passes all of the pair cuts produces a trigger.
+    		return true;
+        }
+        
+        // If the loop terminates without producing a trigger, there
+    	// are no cluster pairs which meet the trigger conditions.
+        return false;
+    }
+    
+    /**
+     * Adds clusters from a new event into the top and bottom cluster
+     * queues so that they may be formed into pairs.
+     * 
+     * @param clusterList - The clusters to add to the queues.
+     */
+    private void updateClusterQueues(List<HPSEcalCluster> clusterList) {
+    	// Create lists to store the top and bottom clusters.
+        ArrayList<HPSEcalCluster> topClusterList = new ArrayList<HPSEcalCluster>();
+        ArrayList<HPSEcalCluster> botClusterList = new ArrayList<HPSEcalCluster>();
+        
+        // Loop over the clusters in the cluster list.
+        for (HPSEcalCluster cluster : clusterList) {
+        	// If the cluster is on the top of the calorimeter, it
+        	// goes into the top cluster list.
+            if (cluster.getSeedHit().getIdentifierFieldValue("iy") > 0) {
+                topClusterList.add(cluster);
+            }
+            
+            // Otherwise, it goes into the bottom cluster list.
+            else { botClusterList.add(cluster); }
+        }
+        
+        // Add the new cluster lists to the cluster queues.
+        topClusterQueue.add(topClusterList);
+        botClusterQueue.add(botClusterList);
+        
+        // Remove the oldest cluster lists from the queues.
+        topClusterQueue.remove();
+        botClusterQueue.remove();
+    }
+}
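
For illustration only (not part of this commit): the testTrigger() method above applies its pair cuts as a short-circuiting cascade, counting how many pairs survive each successive cut and firing on the first pair that survives them all. Below is a minimal, self-contained Java sketch of that pattern. The Cut interface, the double[] pair representation, and the threshold values (taken from the backgroundLevel == 10 settings above) are illustrative assumptions, not the committed driver, which operates on HPSEcalCluster[] pairs with its own cut methods.

import java.util.Arrays;
import java.util.List;

class PairCutCascadeSketch {
    // Hypothetical stand-in for the driver's pair cuts; a pair is {E_high, E_low} in GeV.
    interface Cut { boolean passes(double[] pair); }

    static boolean testTrigger(List<double[]> pairs) {
        // Ordered cuts with per-cut pass counters, mirroring pairEnergySumCount,
        // pairEnergyDifferenceCount, etc. in the driver above.
        Cut[] cuts = {
            p -> p[0] + p[1] > 0.400 && p[0] + p[1] < 2.000, // energy sum window (GeV)
            p -> p[0] - p[1] < 1.400                         // energy difference (GeV)
        };
        int[] passCount = new int[cuts.length];

        pairLoop:
        for (double[] pair : pairs) {
            for (int i = 0; i < cuts.length; i++) {
                // A pair that fails any cut is skipped; later cuts are not evaluated.
                if (!cuts[i].passes(pair)) { continue pairLoop; }
                passCount[i]++;
            }
            // A pair that survives every cut produces a trigger immediately.
            return true;
        }
        // No pair met all of the trigger conditions.
        return false;
    }

    public static void main(String[] args) {
        System.out.println(testTrigger(Arrays.asList(
                new double[]{1.9, 0.3},    // fails the energy sum window (sum = 2.2 GeV)
                new double[]{1.0, 0.8}))); // passes both cuts -> prints true
    }
}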

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
FADCTriggerDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCTriggerDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCTriggerDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,678 @@
+package org.hps.readout.ecal;
+
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
+import java.io.IOException;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+
+import org.hps.recon.ecal.ECalUtils;
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.geometry.Detector;
+import org.lcsim.util.aida.AIDA;
+
+/**
+ * Reads clusters and makes a trigger decision using the opposite-quadrant criterion.
+ * Prints triggers to a file if a file path is specified.
+ *
+ * @author Omar Moreno <[log in to unmask]>
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: FADCTriggerDriver.java,v 1.4 2013/09/02 21:56:56 phansson Exp $
+ */
+public class FADCTriggerDriver extends TriggerDriver {
+
+    int nTriggers;
+    int totalEvents;
+    protected double beamEnergy = -1; //by default, get beam energy from detector name
+    private int minHitCount = 1;
+    private boolean useDefaultCuts = true;
+    private double clusterEnergyHigh = 2.2 * ECalUtils.GeV;
+    private double clusterEnergyLow = .1 * ECalUtils.GeV;
+    private double energySumThreshold = 2.2 * ECalUtils.GeV;
+    private double energyDifferenceThreshold = 2.2 * ECalUtils.GeV;
+    private double maxCoplanarityAngle = 90; // degrees
+//    private double energyDistanceDistance = 250; // mm
+//    private double energyDistanceThreshold = 0.8 / 2.2;
+    private double energyDistanceDistance = 200; // mm
+    private double energyDistanceThreshold = 0.5; // unitless fraction
+    // maximum time difference between two clusters, in units of readout cycles (4 ns).
+    private int pairCoincidence = 2;
+    private double originX = 1393.0 * Math.tan(0.03052); //ECal midplane, defined by photon beam position (30.52 mrad) at ECal face (z=1393 mm)
+    int allPairs;
+    int oppositeQuadrantCount;
+    int clusterEnergyCount;
+    int energySumCount;
+    int energyDifferenceCount;
+    int energyDistanceCount;
+    int coplanarityCount;
+    AIDA aida = AIDA.defaultInstance();
+    IHistogram2D clusterHitCount2DAll, clusterEnergy2DAll, clusterSumDiff2DAll, energyDistance2DAll, clusterAngles2DAll, clusterCoplanarity2DAll;
+    IHistogram2D clusterHitCount2D, clusterEnergy2D, clusterSumDiff2D, energyDistance2D, clusterAngles2D, clusterCoplanarity2D;
+    IHistogram1D triggerBits1D, triggerTimes1D;
+    IHistogram2D clusterSeeds, trigClusterSeeds;
+    int truthPeriod = 250;
+    private boolean useQuadrants = false;
+    protected String clusterCollectionName = "EcalClusters";
+    // FIFO queues of lists of clusters in each ECal half.
+    // Each list corresponds to one readout cycle.
+    private Queue<List<HPSEcalCluster>> topClusterQueue = null;
+    private Queue<List<HPSEcalCluster>> botClusterQueue = null;
+    PrintWriter pairWriter;
+
+    private enum Flag {
+
+        CLUSTER_HITCOUNT(4), CLUSTER_ENERGY(3), ENERGY_SUM_DIFF(2), ENERGY_DISTANCE(1), COPLANARITY(0);
+        private final int index;
+
+        Flag(int i) {
+            index = i;
+        }
+
+        static int bitmask(EnumSet<Flag> flags) {
+            int mask = 0;
+            for (Flag flag : flags) {
+                mask |= 1 << flag.index;
+            }
+            return mask;
+        }
+    }
+
+    public void setClusterCollectionName(String clusterCollectionName) {
+        this.clusterCollectionName = clusterCollectionName;
+    }
+
+    public void setCutsFromBeamEnergy(double beamEnergy) {
+        if (beamEnergy == 1.1) {
+            System.out.println(this.getClass().getSimpleName() + ": Setting trigger for 1.1 GeV beam");
+            maxCoplanarityAngle = 90;
+            clusterEnergyHigh = .7 * ECalUtils.GeV;
+            clusterEnergyLow = .1 * ECalUtils.GeV;
+            energySumThreshold = 0.8 * ECalUtils.GeV;
+            energyDifferenceThreshold = beamEnergy;
+        } else if (beamEnergy == 2.2) {
+            System.out.println(this.getClass().getSimpleName() + ": Setting trigger for 2.2 GeV beam");
+            maxCoplanarityAngle = 35;
+            clusterEnergyHigh = 1.5 * ECalUtils.GeV;
+            clusterEnergyLow = .1 * ECalUtils.GeV;
+            energySumThreshold = 1.9 * ECalUtils.GeV;
+            energyDifferenceThreshold = beamEnergy;
+        } else if (beamEnergy == 6.6) {
+            System.out.println(this.getClass().getSimpleName() + ": Setting trigger for 6.6 GeV beam");
+            maxCoplanarityAngle = 60;
+            clusterEnergyHigh = 5.0 * ECalUtils.GeV;
+            clusterEnergyLow = .1 * ECalUtils.GeV;
+            energySumThreshold = 5.5 * ECalUtils.GeV;
+            energyDifferenceThreshold = beamEnergy;
+        }
+    }
+
+    protected double getBeamEnergyFromDetector(Detector detector) {
+        if (detector.getName().contains("1pt1")) {
+            return 1.1;
+        } else if (detector.getName().contains("2pt2")) {
+            return 2.2;
+        } else if (detector.getName().contains("6pt6")) {
+            return 6.6;
+        } else {
+            return -1.0;
+        }
+    }
+
+    public void setTruthPeriod(int truthPeriod) {
+        this.truthPeriod = truthPeriod;
+    }
+
+    public void setPairCoincidence(int pairCoincidence) {
+        this.pairCoincidence = pairCoincidence;
+    }
+
+    /**
+     * Set X coordinate used as the origin for cluster coplanarity and distance
+     * calculations. Defaults to the ECal midplane. Units of mm.
+     *
+     * @param originX
+     */
+    public void setOriginX(double originX) {
+        this.originX = originX;
+    }
+
+    /**
+     * Used for plot ranges and cuts that scale with energy. 1.1, 2.2 and 6.6
+     * are associated with default cuts. Units of GeV.
+     *
+     * @param beamEnergy
+     */
+    public void setBeamEnergy(double beamEnergy) {
+        this.beamEnergy = beamEnergy;
+    }
+
+    /**
+     * Use default cuts based on beam energy.
+     *
+     * @param useDefaultCuts
+     */
+    public void setUseDefaultCuts(boolean useDefaultCuts) {
+        this.useDefaultCuts = useDefaultCuts;
+    }
+
+    /**
+     * Minimum hit count for a cluster.
+     *
+     * @param minHitCount
+     */
+    public void setMinHitCount(int minHitCount) {
+        this.minHitCount = minHitCount;
+    }
+
+    /**
+     * Maximum energy for a cluster. Units of GeV.
+     *
+     * @param clusterEnergyHigh
+     */
+    public void setClusterEnergyHigh(double clusterEnergyHigh) {
+        this.clusterEnergyHigh = clusterEnergyHigh * ECalUtils.GeV;
+    }
+
+    /**
+     * Minimum energy for a cluster. Units of GeV.
+     *
+     * @param clusterEnergyLow
+     */
+    public void setClusterEnergyLow(double clusterEnergyLow) {
+        this.clusterEnergyLow = clusterEnergyLow * ECalUtils.GeV;
+    }
+
+    /**
+     * Maximum energy sum of the two clusters in a pair. Units of GeV.
+     *
+     * @param energySumThreshold
+     */
+    public void setEnergySumThreshold(double energySumThreshold) {
+        this.energySumThreshold = energySumThreshold * ECalUtils.GeV;
+    }
+
+    /**
+     * Maximum energy difference between the two clusters in a pair. Units of
+     * GeV.
+     *
+     * @param energyDifferenceThreshold
+     */
+    public void setEnergyDifferenceThreshold(double energyDifferenceThreshold) {
+        this.energyDifferenceThreshold = energyDifferenceThreshold * ECalUtils.GeV;
+    }
+
+    /**
+     * Maximum deviation from coplanarity for the two clusters in a pair. Units
+     * of degrees.
+     *
+     * @param maxCoplanarityAngle
+     */
+    public void setMaxCoplanarityAngle(double maxCoplanarityAngle) {
+        this.maxCoplanarityAngle = maxCoplanarityAngle;
+    }
+
+    /**
+     * Distance threshold for the energy-distance cut. Units of mm.
+     *
+     * @param energyDistanceDistance
+     */
+    public void setEnergyDistanceDistance(double energyDistanceDistance) {
+        this.energyDistanceDistance = energyDistanceDistance;
+    }
+
+    /**
+     * Energy threshold for the energy-distance cut. Units of the beam energy.
+     *
+     * @param energyDistanceThreshold
+     */
+    public void setEnergyDistanceThreshold(double energyDistanceThreshold) {
+        this.energyDistanceThreshold = energyDistanceThreshold;
+    }
+
+    public void setPairFile(String pairFile) {
+        try {
+            pairWriter = new PrintWriter(pairFile);
+        } catch (IOException e) {
+        }
+    }
+
+    @Override
+    public void detectorChanged(Detector detector) {
+        if (beamEnergy < 0) {
+            beamEnergy = this.getBeamEnergyFromDetector(detector);
+        }
+        if (useDefaultCuts) {
+            setCutsFromBeamEnergy(beamEnergy);
+        }
+
+        clusterHitCount2DAll = aida.histogram2D("All cluster pairs: hit count (less energetic vs. more energetic)", 9, 0.5, 9.5, 9, 0.5, 9.5);
+        clusterSumDiff2DAll = aida.histogram2D("All cluster pairs: energy difference vs. sum", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
+        clusterEnergy2DAll = aida.histogram2D("All cluster pairs: energy (less energetic vs. more energetic)", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
+        energyDistance2DAll = aida.histogram2D("All cluster pairs: distance vs. energy (less energetic cluster)", 100, 0.0, 0.5 * beamEnergy, 25, 0.0, 400.0);
+        clusterCoplanarity2DAll = aida.histogram2D("All cluster pairs: cluster angle uncoplanarity vs. less energetic cluster angle", 100, -180.0, 180.0, 100, -180.0, 180.0);
+        clusterAngles2DAll = aida.histogram2D("All cluster pairs: cluster angle (less energetic vs. more energetic)", 100, -180.0, 180.0, 100, -180.0, 180.0);
+
+        clusterHitCount2D = aida.histogram2D("Passed other cuts: hit count (less energetic vs. more energetic)", 9, 0.5, 9.5, 9, 0.5, 9.5);
+        clusterSumDiff2D = aida.histogram2D("Passed other cuts: energy difference vs. sum", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
+        clusterEnergy2D = aida.histogram2D("Passed other cuts: energy (less energetic vs. more energetic)", 100, 0.0, 2 * beamEnergy, 100, 0.0, beamEnergy);
+        energyDistance2D = aida.histogram2D("Passed other cuts: distance vs. energy (less energetic cluster)", 100, 0.0, 0.5 * beamEnergy, 25, 0.0, 400.0);
+        clusterCoplanarity2D = aida.histogram2D("Passed other cuts: cluster angle uncoplanarity vs. less energetic cluster angle", 100, -180.0, 180.0, 100, -180.0, 180.0);
+        clusterAngles2D = aida.histogram2D("Passed other cuts: cluster angle (less energetic vs. more energetic)", 100, -180.0, 180.0, 100, -180.0, 180.0);
+
+        triggerBits1D = aida.histogram1D(detector.getDetectorName() + " : " + clusterCollectionName + " : trigger bits", 33, -1.5, 31.5);
+        triggerTimes1D = aida.histogram1D(detector.getDetectorName() + " : " + clusterCollectionName + " : trigger times", truthPeriod, -0.5, truthPeriod - 0.5);
+
+        clusterSeeds = aida.histogram2D(detector.getDetectorName() + " : " + clusterCollectionName + " : Cluster seeds", 46, -23, 23, 11, -5.5, 5.5);
+        trigClusterSeeds = aida.histogram2D(detector.getDetectorName() + " : " + clusterCollectionName + " : Cluster seeds, with trigger", 46, -23, 23, 11, -5.5, 5.5);
+    }
+
+    @Override
+    public void startOfData() {
+        //initialize queues and fill with empty lists
+        topClusterQueue = new LinkedList<List<HPSEcalCluster>>();
+        botClusterQueue = new LinkedList<List<HPSEcalCluster>>();
+        for (int i = 0; i < 2 * pairCoincidence + 1; i++) {
+            topClusterQueue.add(new ArrayList<HPSEcalCluster>());
+        }
+        for (int i = 0; i < pairCoincidence + 1; i++) {
+            botClusterQueue.add(new ArrayList<HPSEcalCluster>());
+        }
+        super.startOfData();
+        if (clusterCollectionName == null) {
+            throw new RuntimeException("The parameter clusterCollectionName was not set!");
+        }
+
+        allPairs = 0;
+        oppositeQuadrantCount = 0;
+        clusterEnergyCount = 0;
+        energySumCount = 0;
+        energyDifferenceCount = 0;
+        energyDistanceCount = 0;
+        coplanarityCount = 0;
+    }
+
+    @Override
+    public void process(EventHeader event) {
+        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+            // this needs to run every readout cycle whether or not trigger is live
+            updateClusterQueues(event.get(HPSEcalCluster.class, clusterCollectionName));
+
+            if (pairWriter != null) {
+                List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();
+                for (HPSEcalCluster[] pair : clusterPairs) {
+                    pairWriter.format("%d\t", ClockSingleton.getClock());
+                    for (HPSEcalCluster cluster : pair) {
+                        pairWriter.format("%f\t", cluster.getSeedHit().getTime());
+                        pairWriter.format("%f\t", cluster.getSeedHit().getRawEnergy());
+                        pairWriter.format("%d\t", cluster.getSeedHit().getIdentifierFieldValue("ix"));
+                        pairWriter.format("%d\t", cluster.getSeedHit().getIdentifierFieldValue("iy"));
+                        pairWriter.format("%d\t", cluster.getSize());
+                        pairWriter.format("%f\t", cluster.getEnergy());
+                        pairWriter.format("%f\t", getClusterAngle(cluster));
+                        pairWriter.format("%f\t", getClusterDistance(cluster));
+                    }
+                    pairWriter.println();
+                }
+                pairWriter.flush();
+            }
+        }
+        super.process(event);
+    }
+
+    @Override
+    protected boolean triggerDecision(EventHeader event) {
+        // Check whether the event has the ECal cluster collection.
+        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+            return testTrigger();
+        } else {
+            return false;
+        }
+    }
+
+    public boolean testTrigger() {
+        boolean trigger = false;
+
+        List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();
+
+        //--- Apply Trigger Cuts ---//
+
+        // Iterate through all cluster pairs present in the event.  If at least
+        // one of the cluster pairs satisfies all of the trigger conditions,
+        // a trigger signal is sent to all other detectors.
+        for (HPSEcalCluster[] clusterPair : clusterPairs) {
+
+            EnumSet<Flag> bits = EnumSet.noneOf(Flag.class);
+
+            if (outputStream != null) {
+                outputStream.printf("Event %d: cluster pair (energy %f in quadrant %d (%s), energy %f in quadrant %d (%s))\n",
+                        ClockSingleton.getClock(),
+                        clusterPair[0].getEnergy(), ECalUtils.getQuadrant(clusterPair[0]), clusterPair[0].getSeedHit().getPositionVec().toString(),
+                        clusterPair[1].getEnergy(), ECalUtils.getQuadrant(clusterPair[1]), clusterPair[1].getSeedHit().getPositionVec().toString());
+            }
+
+            allPairs++;
+
+            if (useQuadrants) {
+                // Require that the event have at least two clusters in opposite
+                // quadrants
+                if (!oppositeQuadrantsCut(clusterPair)) {
+                    if (outputStream != null) {
+                        outputStream.println("Failed opposite quadrant cut");
+                    }
+                    continue;
+                }
+                oppositeQuadrantCount++;
+            }
+
+            // Require the components of a cluster pair to have at least one 
+            // hit each (should always be true)
+            if (clusterHitCount(clusterPair)) {
+                bits.add(Flag.CLUSTER_HITCOUNT);
+            }
+
+            // Require the components of a cluster pair to have an energy in
+            // the range of 100 MeV to 1.85 GeV
+            if (clusterECut(clusterPair)) {
+                bits.add(Flag.CLUSTER_ENERGY);
+            }
+
+            bits.add(Flag.ENERGY_SUM_DIFF);
+            // Require the sum of the energies of the components of the
+            // cluster pair to be less than the
+            // (Beam Energy)*(Sampling Fraction) ( 2 GeV for the Test Run )
+            if (!energySum(clusterPair)) {
+                bits.remove(Flag.ENERGY_SUM_DIFF);
+            }
+
+            // Require the difference in energy of the components of the
+            // cluster pair to be less than 1.5 GeV
+            if (!energyDifference(clusterPair)) {
+                bits.remove(Flag.ENERGY_SUM_DIFF);
+            }
+
+            // Apply a low energy cluster vs. distance cut of the form
+            // E_low + d_b*.0032 GeV/mm > .8 GeV (the pair passes if above this line)
+            if (energyDistanceCut(clusterPair)) {
+                bits.add(Flag.ENERGY_DISTANCE);
+            }
+
+            // Require that the two clusters are coplanar with the beam within
+            // 35 degrees
+            if (coplanarityCut(clusterPair)) {
+                bits.add(Flag.COPLANARITY);
+            }
+
+            if (bits.contains(Flag.CLUSTER_ENERGY)) {
+                clusterEnergyCount++;
+                if (energySum(clusterPair)) {
+                    energySumCount++;
+                    if (energyDifference(clusterPair)) {
+                        energyDifferenceCount++;
+                        if (bits.contains(Flag.ENERGY_DISTANCE)) {
+                            energyDistanceCount++;
+                            if (bits.contains(Flag.COPLANARITY)) {
+                                coplanarityCount++;
+                            } else if (outputStream != null) {
+                                outputStream.println("Failed coplanarity cut");
+                            }
+                        } else if (outputStream != null) {
+                            outputStream.println("Failed energy-distance cut");
+                        }
+                    } else if (outputStream != null) {
+                        outputStream.println("Failed energy difference cut");
+                    }
+                } else if (outputStream != null) {
+                    outputStream.println("Failed energy sum cut");
+                }
+            } else if (outputStream != null) {
+                outputStream.println("Failed cluster energy cut");
+            }
+
+            clusterHitCount2DAll.fill(clusterPair[0].getCalorimeterHits().size(), clusterPair[1].getCalorimeterHits().size());
+            clusterSumDiff2DAll.fill(clusterPair[0].getEnergy() + clusterPair[1].getEnergy(), clusterPair[0].getEnergy() - clusterPair[1].getEnergy());
+            clusterEnergy2DAll.fill(clusterPair[0].getEnergy(), clusterPair[1].getEnergy());
+            energyDistance2DAll.fill(clusterPair[1].getEnergy(), getClusterDistance(clusterPair[1]));
+            clusterCoplanarity2DAll.fill(getClusterAngle(clusterPair[1]), pairUncoplanarity(clusterPair));
+            clusterAngles2DAll.fill(getClusterAngle(clusterPair[0]), getClusterAngle(clusterPair[1]));
+
+            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.CLUSTER_HITCOUNT)))) {
+                clusterHitCount2D.fill(clusterPair[0].getCalorimeterHits().size(), clusterPair[1].getCalorimeterHits().size());
+            }
+
+            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.ENERGY_SUM_DIFF, Flag.CLUSTER_ENERGY)))) { //cluster energy, energy-distance, coplanarity
+                clusterSumDiff2D.fill(clusterPair[0].getEnergy() + clusterPair[1].getEnergy(), clusterPair[0].getEnergy() - clusterPair[1].getEnergy());
+                clusterEnergy2D.fill(clusterPair[0].getEnergy(), clusterPair[1].getEnergy());
+            }
+            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.ENERGY_DISTANCE)))) {
+                energyDistance2D.fill(clusterPair[1].getEnergy(), getClusterDistance(clusterPair[1]));
+            }
+            if (bits.containsAll(EnumSet.complementOf(EnumSet.of(Flag.COPLANARITY)))) {
+                clusterCoplanarity2D.fill(getClusterAngle(clusterPair[1]), pairUncoplanarity(clusterPair));
+                clusterAngles2D.fill(getClusterAngle(clusterPair[0]), getClusterAngle(clusterPair[1]));
+            }
+
+            triggerBits1D.fill(Flag.bitmask(bits));
+
+            if (bits.containsAll(EnumSet.allOf(Flag.class))) {
+                // If all cuts are passed, we have a trigger
+                if (outputStream != null) {
+                    outputStream.println("Passed all cuts");
+                }
+                trigger = true;
+
+                for (HPSEcalCluster cluster : clusterPair) {
+                    int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
+                    int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
+                    trigClusterSeeds.fill(ix - 0.5 * Math.signum(ix), iy);
+                }
+            }
+        }
+        if (trigger) {
+            triggerBits1D.fill(-1);
+            triggerTimes1D.fill(ClockSingleton.getClock() % truthPeriod);
+        }
+        return trigger;
+    }
+
+    @Override
+    public void endOfData() {
+        if (outputStream != null) {
+            printCounts(outputStream);
+        }
+        printCounts(new PrintWriter(System.out));
+        if (pairWriter != null) {
+            pairWriter.close();
+        }
+        super.endOfData();
+    }
+
+    private void printCounts(PrintWriter writer) {
+        writer.printf("Number of pairs: %d\n", allPairs);
+        writer.printf("Number of cluster pairs after successive trigger conditions:\n");
+        if (useQuadrants) {
+            writer.printf("Opposite quadrants: %d\n", oppositeQuadrantCount);
+        }
+        writer.printf("Cluster energy: %d\n", clusterEnergyCount);
+        writer.printf("Energy sum: %d\n", energySumCount);
+        writer.printf("Energy difference: %d\n", energyDifferenceCount);
+        writer.printf("Energy-distance cut: %d\n", energyDistanceCount);
+        writer.printf("Coplanarity: %d\n", coplanarityCount);
+        writer.printf("Trigger count: %d\n", numTriggers);
+        writer.close();
+    }
+
+    protected void updateClusterQueues(List<HPSEcalCluster> ecalClusters) {
+        ArrayList<HPSEcalCluster> topClusterList = new ArrayList<HPSEcalCluster>();
+        ArrayList<HPSEcalCluster> botClusterList = new ArrayList<HPSEcalCluster>();
+        for (HPSEcalCluster ecalCluster : ecalClusters) {
+//            System.out.format("add cluster\t%f\t%d\n", ecalCluster.getSeedHit().getTime(), ecalCluster.getSeedHit().getIdentifierFieldValue("iy"));
+            if (ecalCluster.getSeedHit().getIdentifierFieldValue("iy") > 0) {
+                topClusterList.add(ecalCluster);
+            } else {
+                botClusterList.add(ecalCluster);
+            }
+
+            int ix = ecalCluster.getSeedHit().getIdentifierFieldValue("ix");
+            int iy = ecalCluster.getSeedHit().getIdentifierFieldValue("iy");
+            clusterSeeds.fill(ix - 0.5 * Math.signum(ix), iy);
+        }
+
+        topClusterQueue.add(topClusterList);
+        botClusterQueue.add(botClusterList);
+        topClusterQueue.remove();
+        botClusterQueue.remove();
+    }
+
+    /**
+     * Get a list of all top-bottom cluster pairs formed from the cluster
+     * queues, with the more energetic cluster first in each pair.
+     *
+     * @return list of cluster pairs
+     */
+    protected List<HPSEcalCluster[]> getClusterPairsTopBot() {
+        // Make a list of cluster pairs
+        List<HPSEcalCluster[]> clusterPairs = new ArrayList<HPSEcalCluster[]>();
+
+        // Loop over all top-bottom pairs of clusters; higher-energy cluster goes first in the pair
+        // To apply pair coincidence time, use only bottom clusters from the 
+        // readout cycle pairCoincidence readout cycles ago, and top clusters 
+        // from all 2*pairCoincidence+1 previous readout cycles
+        for (HPSEcalCluster botCluster : botClusterQueue.element()) {
+            for (List<HPSEcalCluster> topClusters : topClusterQueue) {
+                for (HPSEcalCluster topCluster : topClusters) {
+//                    System.out.format("%f\t%f\n", topCluster.getSeedHit().getTime(), botCluster.getSeedHit().getTime());
+                    if (topCluster.getEnergy() > botCluster.getEnergy()) {
+                        HPSEcalCluster[] clusterPair = {topCluster, botCluster};
+                        clusterPairs.add(clusterPair);
+                    } else {
+                        HPSEcalCluster[] clusterPair = {botCluster, topCluster};
+                        clusterPairs.add(clusterPair);
+                    }
+                }
+            }
+        }
+        return clusterPairs;
+    }
+
+    /**
+     * Checks if the ECal clusters making up a cluster pair lie in opposite
+     * quadrants
+     *
+     * @param clusterPair : pair of clusters
+     * @return true if opposite quadrants, false otherwise
+     */
+    protected boolean oppositeQuadrantsCut(HPSEcalCluster[] clusterPair) {
+        int quad1 = ECalUtils.getQuadrant(clusterPair[0]);
+        int quad2 = ECalUtils.getQuadrant(clusterPair[1]);
+
+        //if clusters are in the same quadrant, they're not opposite quadrants
+        if (quad1 == quad2) {
+            return false;
+        } //opposite pairs of quadrants are either both even (2 and 4) or both odd (1 and 3)
+        else {
+            return ((quad1 & 1) == (quad2 & 1));
+        }
+    }
+
+    /**
+     * Checks if the ECal clusters making up a cluster pair both have at least
+     * the minimum number of hits.
+     *
+     * @param clusterPair: pair of clusters
+     * @return true if the pair passes the cut, false otherwise
+     */
+    protected boolean clusterHitCount(HPSEcalCluster[] clusterPair) {
+        return (clusterPair[0].getCalorimeterHits().size() >= minHitCount
+                && clusterPair[1].getCalorimeterHits().size() >= minHitCount);
+    }
+
+    /**
+     * Checks if the ECal clusters making up a cluster pair lie above the low
+     * energy threshold and below the high energy threshold
+     *
+     * @param clusterPair : pair of clusters
+     * @return true if both clusters pass the cut, false otherwise
+     */
+    protected boolean clusterECut(HPSEcalCluster[] clusterPair) {
+        return (clusterPair[0].getEnergy() < clusterEnergyHigh
+                && clusterPair[1].getEnergy() < clusterEnergyHigh
+                && clusterPair[0].getEnergy() > clusterEnergyLow
+                && clusterPair[1].getEnergy() > clusterEnergyLow);
+    }
+
+    /**
+     * Checks if the sum of the energies of ECal clusters making up a cluster
+     * pair is below an energy sum threshold
+     *
+     * @param clusterPair : pair of clusters
+     * @return true if the pair passes the cut, false otherwise
+     */
+    protected boolean energySum(Cluster[] clusterPair) {
+        double clusterESum = clusterPair[0].getEnergy() + clusterPair[1].getEnergy();
+        return (clusterESum < energySumThreshold);
+    }
+
+    /**
+     * Checks if the energy difference between the ECal clusters making up a
+     * cluster pair is below an energy difference threshold
+     *
+     * @param clusterPair : pair of clusters
+     * @return true if the pair passes the cut, false otherwise
+     */
+    protected boolean energyDifference(HPSEcalCluster[] clusterPair) {
+        double clusterEDifference = clusterPair[0].getEnergy() - clusterPair[1].getEnergy();
+
+        return (clusterEDifference < energyDifferenceThreshold);
+    }
+
+    /**
+     * Require that the energy and beam distance of the lower-energy cluster in a
+     * cluster pair lie above the line with x-intercept energyDistanceDistance and
+     * y-intercept beamEnergy*energyDistanceThreshold (with the earlier defaults of
+     * 250 mm and 0.8 GeV, this is E_low + d_b*.0032 GeV/mm > .8 GeV).
+     *
+     * @param clusterPair : pair of clusters
+     * @return true if the pair passes the cut, false otherwise
+     */
+    protected boolean energyDistanceCut(HPSEcalCluster[] clusterPair) {
+        HPSEcalCluster lowEnergyCluster = clusterPair[1];
+
+        // Calculate its position
+        double lowEClusterDistance = getClusterDistance(clusterPair[1]);
+        // event passes cut if above the line with X- and Y-intercepts defined by energyDistanceDistance and beamEnergy*energyDistanceThreshold
+        double clusterDistvsE = lowEnergyCluster.getEnergy() + lowEClusterDistance * beamEnergy * energyDistanceThreshold / energyDistanceDistance;
+
+        return (clusterDistvsE > beamEnergy * energyDistanceThreshold);
+    }
+
+    /**
+     * Checks if a cluster pair is coplanar to the beam within a given angle
+     *
+     * @param clusterPair : pair of clusters
+     * @return true if the pair passes the cut, false otherwise
+     */
+    protected boolean coplanarityCut(HPSEcalCluster[] clusterPair) {
+        return (Math.abs(pairUncoplanarity(clusterPair)) < maxCoplanarityAngle);
+    }
+
+    protected double pairUncoplanarity(HPSEcalCluster[] clusterPair) { // Find the angle between clusters in the pair
+        double cluster1Angle = (getClusterAngle(clusterPair[0]) + 180.0) % 180.0;
+        double cluster2Angle = (getClusterAngle(clusterPair[1]) + 180.0) % 180.0;
+
+        return cluster2Angle - cluster1Angle;
+    }
+
+    protected double getClusterAngle(HPSEcalCluster cluster) { //returns angle in range of -180 to 180
+        double position[] = cluster.getSeedHit().getPosition();
+        return Math.toDegrees(Math.atan2(position[1], position[0] - originX));
+    }
+
+    protected double getClusterDistance(HPSEcalCluster cluster) {
+        return Math.hypot(cluster.getSeedHit().getPosition()[0] - originX, cluster.getSeedHit().getPosition()[1]);
+    }
+}
\ No newline at end of file
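
For illustration only (not part of this commit): the energyDistanceCut(...) method above tests whether the lower-energy cluster lies above a straight line in the (distance, energy) plane, with x-intercept energyDistanceDistance (mm) and y-intercept beamEnergy * energyDistanceThreshold (GeV). A minimal standalone sketch of that check follows; the method and variable names here are illustrative, not the committed API.

class EnergyDistanceCutSketch {
    // Pass if the lower-energy cluster lies above the line from (0, e0) to (D, 0),
    // i.e. eLow + d * e0 / D > e0, where e0 = beamEnergy * threshold and D is in mm.
    static boolean passesEnergyDistanceCut(double eLowGeV, double distanceMm,
                                           double beamEnergyGeV,
                                           double energyDistanceThreshold,
                                           double energyDistanceDistanceMm) {
        double e0 = beamEnergyGeV * energyDistanceThreshold; // y-intercept in GeV
        return eLowGeV + distanceMm * e0 / energyDistanceDistanceMm > e0;
    }

    public static void main(String[] args) {
        // Defaults from the driver above: D = 200 mm, threshold = 0.5, with a 2.2 GeV beam.
        System.out.println(passesEnergyDistanceCut(0.3, 150.0, 2.2, 0.5, 200.0)); // true
        System.out.println(passesEnergyDistanceCut(0.2, 50.0, 2.2, 0.5, 200.0));  // false
    }
}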

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
FADCTriggerVariableDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCTriggerVariableDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/FADCTriggerVariableDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,172 @@
+/*
+ * To change this template, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package org.hps.readout.ecal;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.geometry.Detector;
+
+/**
+ * Dumps trigger variables to a text file.
+ * @author phansson <[log in to unmask]>
+ * @version $id: $
+ */
+public class FADCTriggerVariableDriver extends FADCTriggerDriver {
+    private int _pairs = 0;
+
+    public FADCTriggerVariableDriver() {
+    }
+
+    @Override
+    public void startOfData() {
+        if(!"".equals(outputFileName)) {
+            try {
+                outputStream = new PrintWriter(outputFileName);
+            } catch (FileNotFoundException ex) {
+                Logger.getLogger(FADCTriggerVariableDriver.class.getName()).log(Level.SEVERE, null, ex);
+            }
+        } else {
+            throw new RuntimeException("Need to supply an output file!");
+        }
+        
+        String str = "event/I:beamenergy/F:pairid/I:cl1E/F:cl1posx/F:cl1posy/F:cl2E/F:cl2posx/F:cl2posy/F";
+        
+        outputStream.println(str);
+
+    }
+
+    
+    
+    @Override
+    public void detectorChanged(Detector detector) {
+        setCutsFromBeamEnergy(getBeamEnergyFromDetector(detector));
+    }
+
+
+    @Override
+    public void process(EventHeader event) {
+    //    super.process(event);
+
+        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+
+
+            List<HPSEcalCluster> clusters = event.get(HPSEcalCluster.class, clusterCollectionName);
+
+            //System.out.printf("%d ecal clusters in event\n", clusters.size());
+            //System.out.printf("%s: %d clusters\n",this.getClass().getSimpleName(),clusters.size());
+        	//for(HPSEcalCluster cl : clusters) {
+        	//	System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
+        	//}
+        	List<HPSEcalCluster> unique_clusters = this.getUniqueClusters(clusters);
+        	//System.out.printf("%s: %d unique clusters\n",this.getClass().getSimpleName(),unique_clusters.size());
+        	//for(HPSEcalCluster cl : unique_clusters) {
+        	//	System.out.printf("%s: cl E %f x %f y %f \n",this.getClass().getSimpleName(),cl.getEnergy(),cl.getPosition()[0],cl.getPosition()[1]);
+        	//}
+
+            updateClusterQueues(unique_clusters);
+            List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();
+            boolean foundClusterPairs = !clusterPairs.isEmpty();
+
+            if (foundClusterPairs) {
+
+            int ipair = 0;
+            for(HPSEcalCluster[] pair : clusterPairs) {
+            
+                String evString = String.format("%d %f %d ", event.getEventNumber(),this.beamEnergy,ipair);
+                for(int icluster = 0; icluster!=2; icluster++ ) {
+                    
+                    HPSEcalCluster cluster = pair[icluster];
+                    
+                    //int quad = ECalUtils.getQuadrant(cluster);
+                    double E = cluster.getEnergy();
+                    double pos[] = cluster.getSeedHit().getPosition();
+                    //System.out.printf("x %f y %f ix %d iy %d \n", pos[0], pos[1], cluster.getSeedHit().getIdentifierFieldValue("ix"), cluster.getSeedHit().getIdentifierFieldValue("iy"));
+                    
+                    evString += String.format("%f %f %f ", E, pos[0], pos[1]);
+                }
+                //System.out.printf("%s\n",evString);
+                outputStream.println(evString);
+                ++ipair;     
+                ++_pairs;
+            } // pairs
+            }
+             
+        } // has clusters 
+        else {
+            //System.out.printf("No ecal cluster collection in event %d \n", event.getEventNumber());
+        }
+            
+        
+    
+    }
+
+    @Override
+    public void endOfData() {
+        
+            System.out.printf("%s: processed %d pairs\n",this.getClass().getSimpleName(),this._pairs);
+    
+            outputStream.close();
+            
+            
+    }
+
+    
+    
+    private List<HPSEcalCluster> getUniqueClusters(List<HPSEcalCluster> clusters) {
+    	List<HPSEcalCluster> unique = new ArrayList<HPSEcalCluster>();
+    	for(HPSEcalCluster loop_cl : clusters) {
+			HPSEcalClusterCmp loop_clCmp = new HPSEcalClusterCmp(loop_cl);
+    		boolean found = false;
+			for(HPSEcalCluster cl : unique) {
+    			if( loop_clCmp.compareTo(cl) == 0 ) {
+    				found = true;
+    			}
+    		}
+			if( !found ) {
+				unique.add(loop_cl);
+			}
+    	}
+    	return unique;
+    }
+
+
+    private static class HPSEcalClusterCmp implements Comparable<HPSEcalCluster> {
+    	private HPSEcalCluster _cluster;
+		public HPSEcalClusterCmp(HPSEcalCluster cl) {
+			set_cluster(cl);
+		}
+		@Override
+		public int compareTo(HPSEcalCluster cl) {
+				if(cl.getEnergy()==get_cluster().getEnergy() && cl.getPosition()[0]==get_cluster().getPosition()[0] && cl.getPosition()[1]==get_cluster().getPosition()[1] ) {
+					return 0;
+				} else {
+					if( cl.getEnergy() > get_cluster().getEnergy()) {
+						return 1;
+					} else {
+						return -1;
+					}
+				}
+		}
+		public HPSEcalCluster get_cluster() {
+			return _cluster;
+		}
+		public void set_cluster(HPSEcalCluster _cluster) {
+			this._cluster = _cluster;
+		}
+    	
+    }
+    
+}
+    
+    
+    
+
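
For illustration only (not part of this commit): FADCTriggerVariableDriver above writes one header line followed by one whitespace-separated row per cluster pair (event number, beam energy, pair index, then energy and seed x/y position for each of the two clusters). A minimal sketch of that output format follows; the output file name and the numeric values are made up for illustration, while the header's field descriptor string is shown as the driver emits it.

import java.io.FileNotFoundException;
import java.io.PrintWriter;

class TriggerVariableDumpSketch {
    public static void main(String[] args) throws FileNotFoundException {
        // Hypothetical output file; the driver takes the name from its outputFileName parameter.
        PrintWriter out = new PrintWriter("trigger_variables_example.txt");

        // Header written by startOfData(): one field name and type per column.
        out.println("event/I:beamenergy/F:pairid/I:cl1E/F:cl1posx/F:cl1posy/F:cl2E/F:cl2posx/F:cl2posy/F");

        // One example row, as built in process(): event 42, 2.2 GeV beam, pair 0,
        // followed by (E, x, y) of the seed hit for each cluster in the pair.
        out.println(String.format("%d %f %d %f %f %f %f %f %f",
                42, 2.2, 0, 1.050, -120.3, 45.7, 0.820, 135.9, -52.1));

        out.close();
    }
}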

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
HPSCalorimeterHit.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/HPSCalorimeterHit.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/HPSCalorimeterHit.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -4,21 +4,11 @@
 
 import java.util.Comparator;
 
-import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.lcsim.detector.IDetectorElement;
 import org.lcsim.detector.IDetectorElementContainer;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.base.BaseCalorimeterHit;
-import org.lcsim.geometry.Detector;
 
 /**
  * An implementation of CalorimeterHit, with a constructor that sets rawEnergy
@@ -29,11 +19,6 @@
  */
 public class HPSCalorimeterHit extends BaseCalorimeterHit {
 
-    Detector detector = null;    
-    //static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null; 
-
     /**
      * Fully qualified constructor that sets rawEnergy
      *
@@ -42,53 +27,9 @@
      * @param time     Time of energy deposition
      * @param id       Cell ID
      * @param type     Type
-     * WARNING: setDetector(detector° must be called after initialization
      */
     public HPSCalorimeterHit(double energy, double time, long id, int type) {
         this.rawEnergy = energy;
-//      if (position != null) {
-//          this.positionVec = new BasicHep3Vector(position);
-//      } else {
-//          positionVec = null;
-//      }
-      this.time = time;
-      this.id = id;
-      this.type = type;
-    }
-    
-    /**
-     * Fully qualified constructor that sets rawEnergy
-     *
-     * @param energy   Raw energy for this cell
-     * @param position Global Cartesian coordinate for this cell
-     * @param time     Time of energy deposition
-     * @param id       Cell ID
-     * @param type     Type
-     * WARNING: setDetector(detector) must be called after initialization
-     */
-    public HPSCalorimeterHit(CalorimeterHit hit) {
-        this.rawEnergy = hit.getRawEnergy();
-//      if (position != null) {
-//          this.positionVec = new BasicHep3Vector(position);
-//      } else {
-//          positionVec = null;
-//      }
-      this.time = hit.getTime();
-      this.id = hit.getCellID();
-      this.type = hit.getType();
-    }
-    
-    /**
-     * Fully qualified constructor that sets rawEnergy
-     *
-     * @param energy   Raw energy for this cell
-     * @param position Global Cartesian coordinate for this cell
-     * @param time     Time of energy deposition
-     * @param id       Cell ID
-     * @param type     Type
-     */
-    public void setParameters(double energy, double time, long id, int type) {
-        this.rawEnergy = energy;
 //        if (position != null) {
 //            this.positionVec = new BasicHep3Vector(position);
 //        } else {
@@ -103,7 +44,7 @@
     public IDetectorElement getDetectorElement() {
         if (de == null) {
 //            findDetectorElementByPosition();
-            IDetectorElementContainer detectorElements = detector.getDetectorElement().findDetectorElement(getIdentifier());
+            IDetectorElementContainer detectorElements = EcalConditions.getSubdetector().getDetectorElement().findDetectorElement(getIdentifier());
             if (detectorElements.size() != 1) {
                 throw new RuntimeException("Expected exactly one DetectorElement matching ID " + getIdentifier() + ", got " + detectorElements.size());
             } else {
@@ -131,26 +72,4 @@
             return Double.compare(o1.getTime(), o2.getTime());
         }
     }
-    
-    /** 
-     * Must be set when an object HPSCalorimeterHit is created.
-     * @param detector (long)
-     */
-    public void setDetector(Detector detector) {
-        this.detector = detector;
-        
-        // ECAL combined conditions object.
-        //ecalConditions = ConditionsManager.defaultInstance()
-        //        .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        //channels = ecalConditions.getChannelCollection();
-        
-        // ID helper.
-        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
-        
-//        System.out.println("You are now using the database conditions for HPSCalorimeterHit.");
-    }
-    
-    
 }

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
NeutralPionTriggerDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/NeutralPionTriggerDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/NeutralPionTriggerDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,1081 @@
+package org.hps.readout.ecal;
+
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Vector;
+import hep.physics.vec.VecOp;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.lcsim.detector.IGeometryInfo;
+import org.lcsim.detector.solids.Trd;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.aida.AIDA;
+
+/**
+ * Class <code>NeutralPionTriggerDriver</code> simulates a pi-0 trigger.
+ * It executes four cuts, two of which are single cluster cuts and two
+ * of which are cluster pair cuts. The single cluster cuts are on the
+ * total energy of the cluster and the energy of the seed hit of the
+ * cluster. The first cluster pair cut is on the sum of the energies of
+ * both clusters. The second calculates the invariant mass of the
+ * particle that produced the clusters, assuming that clusters were
+ * created by an electron/positron pair. The pair is then cut if the
+ * invariant mass is outside the expected range for a neutral pion decay.
+ * <br/><br/>
+ * All incoming clusters are passed through the single cluster cuts and
+ * those which survive are added to a list of clusters for their event
+ * and stored in a buffer. The buffer stores a number of event lists
+ * equal to coincidence window parameter. This limits the time frame
+ * in which clusters can be used for a trigger. Of the clusters stored
+ * in the cluster buffer, the two with the highest energies are chosen
+ * and the cluster pair cuts are applied to them. If the highest energy
+ * pair survives this process, the event triggers. If it does not,
+ * there is no trigger for the event.
+ * <br/><br/>
+ * All thresholds can be set through a steering file, along with the
+ * coincidence window. The driver also supports a verbose mode where
+ * it will output more details with every event to help with diagnostics.
+ * 
+ * @author Kyle McCarty
+ * @author Michel Garçon
+ */
+public class NeutralPionTriggerDriver extends TriggerDriver {
+	
+	// ==================================================================
+	// ==== Trigger Algorithms ==========================================
+	// ==================================================================	
+	
+    @Override
+    public void endOfData() {
+    	// Print out the results of the trigger cuts.
+    	System.out.printf("Trigger Processing Results%n");
+    	System.out.printf("\tSingle-Cluster Cuts%n");
+    	System.out.printf("\t\tTotal Clusters Processed     :: %d%n", allClusters);
+    	System.out.printf("\t\tPassed Seed Energy Cut       :: %d%n", clusterSeedEnergyCount);
+    	System.out.printf("\t\tPassed Hit Count Cut         :: %d%n", clusterHitCountCount);
+    	if(rejectEdgeCrystals) {
+    		System.out.printf("\t\tPassed Edge Crystal Cut      :: %d%n", clusterEdgeCount);
+    	}
+    	System.out.printf("%n");
+    	System.out.printf("\tCluster Pair Cuts%n");
+    	System.out.printf("\t\tTotal Pairs Processed        :: %d%n", allPairs);
+    	System.out.printf("\t\tPassed Energy Sum Cut        :: %d%n", pairEnergySumCount);
+    	System.out.printf("\t\tPassed Energy Invariant Mass :: %d%n", pairInvariantMassCount);
+    	System.out.printf("%n");
+    	System.out.printf("\tTrigger Count :: %d%n", triggers);
+    	
+    	// Run the superclass method.
+        super.endOfData();
+    }
+	
+	public void process(EventHeader event) {
+		// Generate a temporary list to store the good clusters
+		// in before they are added to the buffer.
+		List<HPSEcalCluster> tempList = new ArrayList<HPSEcalCluster>();
+		
+		// If the current event has a cluster collection, get it.
+		if(event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+			// VERBOSE :: Note that a cluster collection exists for
+			//            this event.
+			if(verbose) { System.out.println("Cluster collection is present for event."); }
+			
+			// Get the cluster list from the event.
+			List<HPSEcalCluster> eventList = event.get(HPSEcalCluster.class, clusterCollectionName);
+			
+			// VERBOSE :: Output the number of extant clusters.
+			if(verbose) { System.out.printf("%d clusters in event.%n", eventList.size()); }
+			
+			// Add the clusters from the event into the cluster list
+			// if they pass the minimum total cluster energy and seed
+			// energy thresholds.
+			for(HPSEcalCluster cluster : eventList) {
+				// Increment the clusters processed count.
+				allClusters++;
+				
+				// Plot the seed energy / cluster energy histogram.
+				seedPercent.fill(cluster.getSeedHit().getCorrectedEnergy() / cluster.getEnergy(), 1);
+				
+				// Get the cluster position indices.
+				int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
+				int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
+				
+				// VERBOSE :: Output the current cluster's properties.
+				if(verbose) {
+					System.out.printf("\tTesting cluster at (%d, %d) with total energy %f and seed energy %f.%n",
+							ix, iy, cluster.getEnergy(), cluster.getSeedHit().getCorrectedEnergy());
+				}
+				
+				// Add the clusters to the uncut histograms.
+				clusterHitCount.fill(cluster.getCalorimeterHits().size());
+				clusterTotalEnergy.fill(cluster.getEnergy());
+				clusterSeedEnergy.fill(cluster.getSeedHit().getCorrectedEnergy());
+				clusterDistribution.fill(ix, iy, 1);
+				
+				// VERBOSE :: Output the single cluster trigger thresholds.
+				if(verbose) {
+					System.out.printf("\tCluster seed energy threshold  :: [%f, %f]%n", clusterSeedEnergyThresholdLow, clusterSeedEnergyThresholdHigh);
+					System.out.printf("\tCluster total energy threshold :: %f%n%n", clusterTotalEnergyThresholdLow);
+				}
+				
+				// Perform the single cluster cuts.
+				boolean totalEnergyCut = clusterTotalEnergyCut(cluster);
+				boolean seedEnergyCut = clusterSeedEnergyCut(cluster);
+				boolean hitCountCut = clusterHitCountCut(cluster);
+				boolean edgeCrystalCut = isEdgeCluster(cluster);
+				
+				// Increment the single cut counts.
+				if(seedEnergyCut) {
+					clusterSeedEnergyCount++;
+					if(hitCountCut) {
+						clusterHitCountCount++;
+						if(rejectEdgeCrystals && edgeCrystalCut) {
+							clusterEdgeCount++;
+						}
+					}
+				}
+				
+				// VERBOSE :: Note whether the cluster passed the single
+				//            cluster cuts.
+				if(verbose) {
+					System.out.printf("\tPassed seed energy cut    :: %b%n", seedEnergyCut);
+					System.out.printf("\tPassed cluster energy cut :: %b%n%n", totalEnergyCut);
+					System.out.printf("\tPassed hit count cut :: %b%n%n", hitCountCut);
+					System.out.printf("\tIs an edge cluster :: %b%n%n", edgeCrystalCut);
+				}
+				
+				// Determine whether the cluster passes all the single
+				// cluster cuts.
+				boolean passedCuts = false;
+				
+				// If edge crystals should not be used for triggering,
+				// require that the cluster not be centered in an edge
+				// crystal.
+				if(rejectEdgeCrystals) {
+					if(totalEnergyCut && seedEnergyCut && hitCountCut && !edgeCrystalCut) {
+						passedCuts = true;
+					}
+				}
+				
+				// Otherwise, it just needs to pass the standard trigger
+				// cuts regardless of where it is located.
+				else {
+					if(totalEnergyCut && seedEnergyCut && hitCountCut) {
+						passedCuts = true;
+					}
+				}
+				
+				// If both pass, add the cluster to the list.
+				if(passedCuts) {
+					// Add the cluster to the cluster list.
+					tempList.add(cluster);
+					
+					// Add the cluster information to the single cut histograms.
+					pClusterHitCount.fill(cluster.getCalorimeterHits().size());
+					pClusterTotalEnergy.fill(cluster.getEnergy());
+					pClusterSeedEnergy.fill(cluster.getSeedHit().getCorrectedEnergy());
+					pClusterDistribution.fill(ix, iy, 1);
+				}
+			}
+			
+			// Remove the oldest cluster buffer element and add the new
+			// cluster list to the buffer.
+			clusterBuffer.removeFirst();
+			clusterBuffer.addLast(tempList);
+		}
+		
+		// Otherwise, clear the cluster list.
+		else {
+			// VERBOSE :: Note that the event has no clusters.
+			if(verbose) { System.out.println("No cluster collection is present for event.\n"); }
+		}
+		
+		// Reset the highest-energy cluster triplet to null.
+		clusterTriplet[0] = null;
+		clusterTriplet[1] = null;
+		clusterTriplet[2] = null;
+		
+		// Loop over all of the cluster lists in the cluster buffer.
+		double[] energy = { 0.0, 0.0, 0.0 };
+		for(List<HPSEcalCluster> bufferList : clusterBuffer) {
+			// Loop over all of the clusters in each buffer list.
+			for(HPSEcalCluster cluster : bufferList) {
+				// If the new cluster is higher energy than the first
+				// slot cluster, move the subsequent clusters down and
+				// insert the new one.
+				if(cluster.getEnergy() > energy[0]) {
+					clusterTriplet[2] = clusterTriplet[1];
+					clusterTriplet[1] = clusterTriplet[0];
+					clusterTriplet[0] = cluster;
+					energy[2] = energy[1];
+					energy[1] = energy[0];
+					energy[0] = cluster.getEnergy();
+				}
+				
+				// Otherwise, if the new cluster has more energy than
+				// the second slot, it goes there and the second moves
+				// to the third.
+				else if(cluster.getEnergy() > energy[1]) {
+					clusterTriplet[2] = clusterTriplet[1];
+					clusterTriplet[1] = cluster;
+					energy[2] = energy[1];
+					energy[1] = cluster.getEnergy();
+				}
+				
+				// If the new cluster has more energy than the third
+				// cluster, it just replaces it.
+				else if(cluster.getEnergy() > energy[2]) {
+					clusterTriplet[2] = cluster;
+					energy[2] = cluster.getEnergy();
+				}
+			}
+		}
+		
+		// The highest energy pair is the same as the first two slots
+		// of the highest energy triplet.
+		clusterPair[0] = clusterTriplet[0];
+		clusterPair[1] = clusterTriplet[1];
+		
+		// Run the superclass event process.
+		super.process(event);
+	}
+	
+	public void startOfData() {
+		// Initialize the cluster buffer to the size of the coincidence window.
+		clusterBuffer = new LinkedList<List<HPSEcalCluster>>();
+		
+		// Populate the buffer with empty lists.
+		for(int i = 0; i < coincidenceWindow; i++) {
+			clusterBuffer.add(new ArrayList<HPSEcalCluster>(0));
+		}
+		
+		// Initialize the cluster hit count diagnostic plots.
+		clusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution", 9, 1, 10);
+		pClusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed Single Cuts)", 9, 1, 10);
+		aClusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed All Cuts)", 9, 1, 10);
+		
+		// Initialize the cluster total energy diagnostic plots.
+		clusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution", 176, 0.0, 2.2);
+		pClusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
+		aClusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+		
+		// Initialize the cluster seed energy diagnostic plots.
+		clusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution", 176, 0.0, 2.2);
+		pClusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
+		aClusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+		
+		// Initialize the seed distribution diagnostic plots.
+		clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 44, -22.0, 22.0, 10, -5, 5);
+		pClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+		aClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+		
+		// Initialize the cluster pair energy sum diagnostic plots.
+		pairEnergySum = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution", 176, 0.0, 2.2);
+		pPairEnergySum = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution (Passed Pair Cuts)", 176, 0.0, 2.2);
+		
+		// Initialize the cluster pair hypothetical invariant mass diagnostic plots.
+		invariantMass = aida.histogram1D("Trigger Plots :: Invariant Mass Distribution", 1500, 0.0, 0.03);
+		pInvariantMass = aida.histogram1D("Trigger Plots :: Invariant Mass Distribution (Passed Pair Cuts)", 1500, 0.0, 0.03);
+		
+		// Initialize the seed percentage of cluster energy.
+		seedPercent = aida.histogram1D("Analysis Plots :: Seed Percentage of Total Energy", 400, 0.0, 1.0);
+	}
+	
+	protected boolean triggerDecision(EventHeader event) {
+		// If the active cluster pair has a null value, then there were
+		// fewer than two clusters in the buffer and we can not trigger.
+		if(!useClusterTriplet && (clusterPair[0] == null || clusterPair[1] == null)) {
+			// VERBOSE :: Note that triggering failed due to insufficient
+			// clusters in the cluster buffer.
+			if(verbose) { System.out.println("Insufficient clusters in buffer -- no trigger."); }
+			
+			// Return false; we can not trigger without two clusters.
+			return false;
+		}
+		
+		// If the active cluster triplet has a null value, then there
+		// were fewer than three clusters in the buffer and we can not
+		// trigger.
+		if(useClusterTriplet && (clusterTriplet[0] == null || clusterTriplet[1] == null || clusterTriplet[2] == null)) {
+			// VERBOSE :: Note that triggering failed due to insufficient
+			// clusters in the cluster buffer.
+			if(verbose) { System.out.println("Insufficient clusters in buffer -- no trigger."); }
+			
+			// Return false; we can not trigger without three clusters.
+			return false;
+		}
+		
+		// Increment the number of pairs considered.
+		allPairs++;
+		
+		// Get the cluster position indices.
+		int[] ix = { clusterPair[0].getSeedHit().getIdentifierFieldValue("ix"), clusterPair[1].getSeedHit().getIdentifierFieldValue("ix") };
+		int[] iy = { clusterPair[0].getSeedHit().getIdentifierFieldValue("iy"), clusterPair[1].getSeedHit().getIdentifierFieldValue("iy") };
+		
+		// VERBOSE :: Output the clusters selected for triggering.
+		if(verbose) {
+			System.out.printf("\tTesting first cluster at (%d, %d) with total energy %f and seed energy %f.%n",
+					ix[0], iy[0], clusterPair[0].getEnergy(), clusterPair[0].getSeedHit().getCorrectedEnergy());
+			System.out.printf("\tTesting second cluster at (%d, %d) with total energy %f and seed energy %f.%n",
+					ix[1], iy[1], clusterPair[1].getEnergy(), clusterPair[1].getSeedHit().getCorrectedEnergy());
+			if(useClusterTriplet) {
+				System.out.printf("\tTesting third cluster at (%d, %d) with total energy %f and seed energy %f.%n",
+						clusterTriplet[2].getSeedHit().getIdentifierFieldValue("ix"), clusterTriplet[2].getSeedHit().getIdentifierFieldValue("iy"),
+						clusterTriplet[2].getEnergy(), clusterTriplet[2].getSeedHit().getCorrectedEnergy());
+			}
+		}
+		
+		if(!useClusterTriplet) {
+			// Fill the uncut histograms.
+			pairEnergySum.fill(getEnergySumValue(clusterPair));
+			invariantMass.fill(getInvariantMassValue(clusterPair));
+			
+			// VERBOSE :: Output the cluster pair trigger thresholds.
+			if(verbose) {
+				System.out.printf("\tCluster pair energy sum threshold     :: %f%n", pairEnergySumThresholdLow);
+				System.out.printf("\tHypothetical invariant mass threshold :: [%f, %f]%n%n", invariantMassThresholdLow, invariantMassThresholdHigh);
+			}
+			
+			// Perform the cluster pair checks.
+			boolean energySumCut = pairEnergySumCut(clusterPair);
+			boolean invariantMassCut = pairInvariantMassCut(clusterPair);
+			
+			// Increment the pair cut counts.
+			if(energySumCut) {
+				pairEnergySumCount++;
+				if(invariantMassCut) {
+					pairInvariantMassCount++;
+				}
+			}
+			
+			// VERBOSE :: Note the outcome of the trigger cuts.
+			if(verbose) {
+				System.out.printf("\tPassed energy sum cut     :: %b%n", energySumCut);
+				System.out.printf("\tPassed invariant mass cut :: %b%n%n", invariantMassCut);
+			}
+			
+			// If the pair passes both cuts, we have a trigger.
+			if(energySumCut && invariantMassCut) {
+				// Fill the cut histograms.
+				pPairEnergySum.fill(getEnergySumValue(clusterPair));
+				pInvariantMass.fill(getInvariantMassValue(clusterPair));
+				
+				// Fill the all cuts histograms.
+				aClusterHitCount.fill(clusterPair[0].getCalorimeterHits().size());
+				aClusterHitCount.fill(clusterPair[1].getCalorimeterHits().size());
+				aClusterTotalEnergy.fill(clusterPair[0].getEnergy());
+				aClusterTotalEnergy.fill(clusterPair[1].getEnergy());
+				aClusterSeedEnergy.fill(clusterPair[0].getSeedHit().getCorrectedEnergy());
+				aClusterSeedEnergy.fill(clusterPair[1].getSeedHit().getCorrectedEnergy());
+				aClusterDistribution.fill(ix[0], iy[0], 1);
+				aClusterDistribution.fill(ix[1], iy[1], 1);
+				
+				// VERBOSE :: Note that the event has triggered.
+				if(verbose) { System.out.println("Event triggers!\n\n"); }
+				
+				// Increment the number of triggers.
+				triggers++;
+				
+				// Return the trigger.
+				return true;
+			}
+		}
+		
+		// If we are using a cluster triplet, apply the cluster triplet
+		// cuts.
+		else {
+			// Perform the cluster triplet checks.
+			boolean energySumCut = tripletEnergySumCut(clusterTriplet);
+			boolean horizontalCut = tripletHorizontalCut(clusterTriplet);
+			boolean energySpatialCut = tripletTotalEnergyCut(clusterTriplet);
+			
+			// Fill the all cuts histograms.
+			aClusterHitCount.fill(clusterPair[0].getCalorimeterHits().size());
+			aClusterHitCount.fill(clusterPair[1].getCalorimeterHits().size());
+			aClusterTotalEnergy.fill(clusterPair[0].getEnergy());
+			aClusterTotalEnergy.fill(clusterPair[1].getEnergy());
+			aClusterSeedEnergy.fill(clusterPair[0].getSeedHit().getCorrectedEnergy());
+			aClusterSeedEnergy.fill(clusterPair[1].getSeedHit().getCorrectedEnergy());
+			aClusterDistribution.fill(ix[0], iy[0], 1);
+			aClusterDistribution.fill(ix[1], iy[1], 1);
+			
+			if(energySumCut && horizontalCut && energySpatialCut) {
+				return true;
+			}
+		}
+		
+		// VERBOSE :: Note that the event has failed to trigger.
+		if(verbose) { System.out.println("No trigger.\n\n"); }
+		
+		// If one or more of the cuts failed, then we do not trigger.
+		return false;
+	}
+	
+	// ==================================================================
+	// ==== Trigger Cut Methods =========================================
+	// ==================================================================
+	
+	/**
+	 * Checks whether the cluster passes the threshold for minimum
+	 * number of component hits.
+	 * @param cluster - The cluster to check.
+	 * @return Returns <code>true</code> if the cluster passes and <code>
+	 * false</code> if it does not.
+	 */
+	private boolean clusterHitCountCut(HPSEcalCluster cluster) {
+		return cluster.getCalorimeterHits().size() >= clusterHitCountThreshold;
+	}
+	
+	/**
+	 * Checks whether the cluster falls within the allowed range for
+	 * the seed hit energy cut.
+	 * @param cluster - The cluster to check.
+	 * @return Returns <code>true</code> if the cluster passes and <code>
+	 * false</code> if it does not.
+	 */
+	private boolean clusterSeedEnergyCut(HPSEcalCluster cluster) {
+		// Get the seed energy value.
+		double seedEnergy = cluster.getSeedHit().getCorrectedEnergy();
+		
+		// Perform the seed energy cut.
+		return seedEnergy >= clusterSeedEnergyThresholdLow && seedEnergy <= clusterSeedEnergyThresholdHigh;
+	}
+	
+	/**
+	 * Checks whether the cluster passes the threshold for minimum
+	 * total cluster energy.
+	 * @param cluster - The cluster to check.
+	 * @return Returns <code>true</code> if the cluster passes and <code>
+	 * false</code> if it does not.
+	 */
+	private boolean clusterTotalEnergyCut(HPSEcalCluster cluster) {
+		// Get the cluster energy.
+		double clusterEnergy = cluster.getEnergy();
+		
+		// Perform the cut.
+		return clusterEnergy >= clusterTotalEnergyThresholdLow && clusterEnergy <= clusterTotalEnergyThresholdHigh;
+	}
+	
+	/**
+	 * Calculates the value used in the pair energy sum cut from a pair
+	 * of two clusters.
+	 * @param clusterPair - The cluster pair from which to derive the
+	 * cut value.
+	 * @return Returns the cut value as a <code>double</code>.
+	 */
+	private static double getEnergySumValue(HPSEcalCluster[] clusterGroup) {
+		// Track the sum.
+		double energySum = 0.0;
+		
+		// Add the energies of all clusters in the array.
+		for(HPSEcalCluster cluster : clusterGroup) { energySum += cluster.getEnergy(); }
+		
+		// Return the sum.
+		return energySum;
+	}
+	
+	/**
+	 * Calculates the value used in the invariant mass cut from a pair
+	 * of two clusters.
+	 * @param clusterPair - The cluster pair from which to derive the
+	 * cut value.
+	 * @return Returns the cut value as a <code>double</code>.
+	 */
+	private double getInvariantMassValue(HPSEcalCluster[] clusterPair) {
+		// Store the x/y positions for the seeds.
+		double x[] = new double[2];
+		double y[] = new double[2];
+		
+		// Get the seed hits.
+		CalorimeterHit[] seed = { clusterPair[0].getSeedHit(), clusterPair[1].getSeedHit() };
+		
+		// Set the positions for each seed.
+		for(int index = 0; index < seed.length; index++) {
+			// Get the seed position array stored in the position map.
+			Double[] seedPos = seedPosMap.get(clusterPair[index].getSeedHit());
+			
+			// If there is a position array for the seed, use it.
+			if(seedPos != null) {
+				x[index] = seedPos[0];
+				y[index] = seedPos[1];
+			}
+			
+			// Otherwise, calculate the position at the crystal face.
+			else {
+				// Get the position and store it in a double array.
+				IGeometryInfo geom = clusterPair[index].getSeedHit().getDetectorElement().getGeometry();
+				double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),
+						(Hep3Vector) new BasicHep3Vector(0, 0, -1 * ((Trd) geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
+				
+				// Set the seed location.
+				x[index] = pos[0];
+				y[index] = pos[1];
+				
+				// Store the seed location for future use.
+				Double[] positionVec = { pos[0], pos[1], pos[2] };
+				seedPosMap.put(clusterPair[index].getSeedHit(), positionVec);
+			}
+		}
+		
+		// Get the cluster energy for each seed.
+		double[] e = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
+		
+		// Return the squared invariant mass of the hypothetical parent particle.
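+		// (Small-angle approximation: m^2 ~= E1 * E2 * (dx^2 + dy^2) / D^2, with D the target-to-calorimeter distance.)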
+		return (e[0] * e[1] * (Math.pow(x[0] - x[1], 2) + Math.pow(y[0] - y[1], 2)) / D2);
+	}
+	
+	/**
+	 * Indicates whether a cluster has a seed hit located on the edge
+	 * of the calorimeter or not.
+	 * 
+	 * @param cluster - The cluster to check.
+	 * @return Returns <code>true</code> if the cluster seed is on the
+	 * edge of the calorimeter and <code>false</code> otherwise.
+	 */
+	private static boolean isEdgeCluster(HPSEcalCluster cluster) {
+		// Get the x- and y-indices of the cluster seed hit.
+		int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
+		int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
+		
+		// Track whether the cluster is an edge cluster or not.
+    	boolean edge = false;
+    	
+    	// Get the absolute values of the coordinates.
+    	int aix = Math.abs(ix);
+    	int aiy = Math.abs(iy);
+    	
+    	// Check if this is an outer edge crystal.
+    	if(aix == 23 || aiy == 5) { edge = true; }
+    	
+    	// Check if this is along the central beam gap.
+    	if(aiy == 1) { edge = true; }
+    	
+    	// Check if this is around the beam gap.
+    	if(aiy == 2 && (ix >= -11 && ix <= -1)) { edge = true; }
+    	
+    	// Return whether the cluster seed sits on an edge.
+    	return edge;
+	}
+	
+	/**
+	 * Checks whether the cluster pair falls within the allowed range
+	 * for the pair energy sum cut.
+	 * @param clusterPair - An array of size two containing the cluster
+	 * pair to check.
+	 * @return Returns <code>true</code> if the clusters pass and <code>
+	 * false</code> if they do not.
+	 */
+	private boolean pairEnergySumCut(HPSEcalCluster[] clusterPair) {
+		// Get the energy sum value.
+		double energySum = getEnergySumValue(clusterPair);
+		
+		// Compare the energy sum against the allowed range.
+		return energySum >= pairEnergySumThresholdLow && energySum <= pairEnergySumThresholdHigh;
+	}
+	
+	/**
+	 * Checks whether the cluster pair passes the threshold for the
+	 * invariant mass check.
+	 * @param clusterPair - An array of size two containing the cluster
+	 * pair to check.
+	 * @return Returns <code>true</code> if the clusters pass and <code>
+	 * false</code> if they do not.
+	 */
+	private boolean pairInvariantMassCut(HPSEcalCluster[] clusterPair) {
+		// Calculate the invariant mass.
+		double myy2 = getInvariantMassValue(clusterPair);
+		
+		// Perform the cut.
+		return ( (myy2 >= invariantMassThresholdLow) && (myy2 <= invariantMassThresholdHigh));
+	}
+	
+	/**
+	 * Checks whether the cluster triplet passes the threshold for the
+	 * minimum triplet energy sum check.
+	 * @param clusterTriplet - An array of size three containing the
+	 * cluster triplet to check.
+	 * @return Returns <code>true</code> if the clusters pass and <code>
+	 * false</code> if they do not.
+	 */
+	private boolean tripletEnergySumCut(HPSEcalCluster[] clusterTriplet) {
+		return (getEnergySumValue(clusterTriplet) >= tripletEnergySumThreshold);
+	}
+	
+	/**
+	 * Checks that at least one cluster is located on the right side
+	 * and at least one on the left side of the calorimeter.
+	 * @param clusterTriplet - An array of size three containing the
+	 * cluster triplet to check.
+	 * @return Returns <code>true</code> if the clusters pass and <code>
+	 * false</code> if they do not.
+	 */
+	private static boolean tripletHorizontalCut(HPSEcalCluster[] clusterTriplet) {
+		// Track whether a cluster has occurred on each horizontal side
+		// of the calorimeter.
+		boolean leftCluster = false;
+		boolean rightCluster = false;
+		
+		// Sort through the cluster triplet and check where they occur.
+		for(HPSEcalCluster cluster : clusterTriplet) {
+			int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
+			if(ix < 0) { leftCluster = true; }
+			if(ix > 0) { rightCluster = true; }
+		}
+		
+		// If a cluster fell on both sides, it passes.
+		if(leftCluster && rightCluster) { return true; }
+		else { return false; }
+	}
+	
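+	/**
+	 * Checks whether at least two distinct clusters in the triplet
+	 * exceed the total energy threshold and are separated by at least
+	 * the minimum pair separation distance.
+	 * @param clusterTriplet - An array of size three containing the
+	 * cluster triplet to check.
+	 * @return Returns <code>true</code> if the triplet passes and <code>
+	 * false</code> if it does not.
+	 */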
+	private boolean tripletTotalEnergyCut(HPSEcalCluster[] clusterTriplet) {
+		// Check to see if each cluster passes the check.
+		for(HPSEcalCluster cluster1 : clusterTriplet) {
+			for(HPSEcalCluster cluster2 : clusterTriplet) {
+				// The cluster pair must be two different clusters.
+				if(cluster1 == cluster2) { continue; }
+				
+				// Check to see if the clusters are over threshold.
+				boolean over1 = cluster1.getEnergy() >= tripletTotalEnergyThreshold;
+				boolean over2 = cluster2.getEnergy() >= tripletTotalEnergyThreshold;
+				
+				// If both the clusters are over threshold, check that
+				// they are sufficiently far apart.
+				if(over1 && over2) {
+					// Get the x and y coordinates of the clusters.
+					double x[] = { cluster1.getPosition()[0], cluster2.getPosition()[0] };
+					double y[] = { cluster1.getPosition()[1], cluster2.getPosition()[1] };
+					
+					// Calculate the distance between the clusters.
+					double dr = Math.sqrt(Math.pow(x[0] - x[1], 2) + Math.pow(y[0] - y[1], 2));
+					
+					// Run the check.
+					if(dr >= tripletPairSeparationThreshold) { return true; }
+				}
+			}
+		}
+		
+		// If none of the cluster pairs pass all the checks, the
+		// triplet fails.
+		return false;
+	}
+	
+	// ==================================================================
+	// ==== Variables Mutator Methods ===================================
+	// ==================================================================
+	
+	/**
+	 * Sets the LCIO collection name where <code>HPSEcalCluster</code>
+	 * objects are stored for use in the trigger.
+	 * @param clusterCollectionName - The name of the LCIO collection.
+	 */
+	public void setClusterCollectionName(String clusterCollectionName) {
+		this.clusterCollectionName = clusterCollectionName;
+	}
+	
+	/**
+	 * Sets the minimum number of hits required for a cluster to be
+	 * used in triggering.
+	 * @param clusterHitCountThreshold - The smallest number of hits
+	 * in a cluster.
+	 */
+	public void setClusterHitCountThreshold(int clusterHitCountThreshold) {
+		this.clusterHitCountThreshold = clusterHitCountThreshold;
+	}
+	
+	/**
+	 * Sets the threshold for the cluster seed energy of individual
+	 * clusters above which the cluster will be rejected and not used
+	 * for triggering.
+	 * @param clusterSeedEnergyThresholdHigh - The cluster seed energy
+	 * upper bound.
+	 */
+	public void setClusterSeedEnergyThresholdHigh(double clusterSeedEnergyThresholdHigh) {
+		this.clusterSeedEnergyThresholdHigh = clusterSeedEnergyThresholdHigh;
+	}
+	
+	/**
+	 * Sets the threshold for the cluster seed energy of individual
+	 * clusters under which the cluster will be rejected and not used
+	 * for triggering.
+	 * @param clusterSeedEnergyThresholdLow - The cluster seed energy
+	 * lower bound.
+	 */
+	public void setClusterSeedEnergyThresholdLow(double clusterSeedEnergyThresholdLow) {
+		this.clusterSeedEnergyThresholdLow = clusterSeedEnergyThresholdLow;
+	}
+	
+	/**
+	 * Sets the threshold for the total cluster energy of individual
+	 * clusters under which the cluster will be rejected and not used
+	 * for triggering.
+	 * @param clusterTotalEnergyThresholdLow - The cluster total energy
+	 * lower bound.
+	 */
+	public void setClusterTotalEnergyThresholdLow(double clusterTotalEnergyThresholdLow) {
+		this.clusterTotalEnergyThresholdLow = clusterTotalEnergyThresholdLow;
+	}
+	
+	/**
+	 * Sets the threshold for the total cluster energy of individual
+	 * clusters above which the cluster will be rejected and not used
+	 * for triggering.
+	 * @param clusterTotalEnergyThresholdHigh - The cluster total energy
+	 * upper bound.
+	 */
+	public void setClusterTotalEnergyThresholdHigh(double clusterTotalEnergyThresholdHigh) {
+		this.clusterTotalEnergyThresholdHigh = clusterTotalEnergyThresholdHigh;
+	}
+	
+	/**
+	 * Sets the number of events that clusters will be retained and
+	 * employed for triggering before they are cleared.
+	 * @param coincidenceWindow - The number of events that clusters
+	 * should be retained.
+	 */
+	public void setCoincidenceWindow(int coincidenceWindow) {
+		this.coincidenceWindow = coincidenceWindow;
+	}
+	
+	/**
+	 * Sets the invariant mass threshold to accept only cluster pairs
+	 * with a reconstructed invariant mass within a certain number of
+	 * standard deviations of the mean (corrected for sampling fraction).
+	 * @param invariantMassSigma - The number of standard deviations
+	 * within which a cluster pair invariant mass is accepted.
+	 */
+	public void setInvariantMassSigma(int invariantMassSigma) {
+		this.invariantMassThresholdLow = 0.012499 - (invariantMassSigma * 0.0011095);
+		this.invariantMassThresholdHigh = 0.012499 + (invariantMassSigma * 0.0011095);
+	}
+	
+	/**
+	 * Sets the threshold for the calculated invariant mass of the
+	 * generating particle (assuming that the clusters are produced
+	 * by a positron/electron pair) above which the cluster pair will
+	 * be rejected and not produce a trigger.
+	 * @param invariantMassThresholdHigh - The invariant mass upper
+	 * bound.
+	 */
+	public void setInvariantMassThresholdHigh(double invariantMassThresholdHigh) {
+		this.invariantMassThresholdHigh = invariantMassThresholdHigh;
+	}
+	
+	/**
+	 * Sets the threshold for the calculated invariant mass of the
+	 * generating particle (assuming that the clusters are produced
+	 * by a positron/electron pair) under which the cluster pair will
+	 * be rejected and not produce a trigger.
+	 * @param invariantMassThresholdLow - The invariant mass lower
+	 * bound.
+	 */
+	public void setInvariantMassThresholdLow(double invariantMassThresholdLow) {
+		this.invariantMassThresholdLow = invariantMassThresholdLow;
+	}
+	
+	/**
+	 * Sets the threshold for the sum of the energies of a cluster pair
+	 * above which the pair will be rejected and not produce a trigger.
+	 * @param pairEnergySumThresholdHigh - The cluster pair energy sum
+	 * upper bound.
+	 */
+	public void setPairEnergySumThresholdHigh(double pairEnergySumThresholdHigh) {
+		this.pairEnergySumThresholdHigh = pairEnergySumThresholdHigh;
+	}
+	
+	/**
+	 * Sets the threshold for the sum of the energies of a cluster pair
+	 * under which the pair will be rejected and not produce a trigger.
+	 * @param pairEnergySumThresholdLow - The cluster pair energy sum
+	 * lower bound.
+	 */
+	public void setPairEnergySumThresholdLow(double pairEnergySumThresholdLow) {
+		this.pairEnergySumThresholdLow = pairEnergySumThresholdLow;
+	}
+	
+	/**
+	 * Sets whether clusters centered on an edge crystal should be
+	 * used for triggering or not.
+	 * 
+	 * @param rejectEdgeCrystals - <code>true</code> means that edge
+	 * clusters will not be used and <code>false</code> means that they
+	 * will be used.
+	 */
+	public void setRejectEdgeCrystals(boolean rejectEdgeCrystals) {
+		this.rejectEdgeCrystals = rejectEdgeCrystals;
+	}
+	
+	/**
+	 * Sets the threshold for the sum of the energies of a cluster triplet
+	 * under which the triplet will be rejected and not produce a trigger.
+	 * @param tripletEnergySumThreshold - The cluster triplet energy sum
+	 * lower bound.
+	 */
+	public void setTripletEnergySumThreshold(double tripletEnergySumThreshold) {
+		this.tripletEnergySumThreshold = tripletEnergySumThreshold;
+	}
+	
+	/**
+	 * Sets the minimum distance apart for a cluster pair within a
+	 * cluster triplet. Clusters that are not sufficiently far apart
+	 * are rejected and do not trigger. 
+	 * @param tripletPairSeparationThreshold - The minimum distance in
+	 * millimeters.
+	 */
+	public void setTripletPairSeparationThreshold(double tripletPairSeparationThreshold) {
+		this.tripletPairSeparationThreshold = tripletPairSeparationThreshold;
+	}
+	
+	/**
+	 * Sets the threshold that at least two clusters in a cluster
+	 * triplet must surpass. Cluster triplets with one or fewer
+	 * clusters above the threshold will be rejected.
+	 * @param tripletTotalEnergyThreshold - The cluster total energy
+	 * that two clusters must pass.
+	 */
+	public void setTripletTotalEnergyThreshold(double tripletTotalEnergyThreshold) {
+		this.tripletTotalEnergyThreshold = tripletTotalEnergyThreshold;
+	}
+	
+	/**
+	 * Toggles whether the driver will output its actions to the console
+	 * during run time or not.
+	 * @param verbose - <code>true</code> indicates that the driver
+	 * will write its actions to the console and <code>false</code> that it will not.
+	 */
+	public void setVerbose(boolean verbose) {
+		this.verbose = verbose;
+	}
+	
+	/**
+	 * Toggles whether the driver triggers off of a pair of clusters
+	 * or a triplet of clusters.
+	 * @param useClusterTriplet - <code>true</code> indicates that a
+	 * triplet should be used and <code>false</code> that a pair should
+	 * be used.
+	 */
+	public void setUseClusterTriplet(boolean useClusterTriplet) {
+		this.useClusterTriplet = useClusterTriplet;
+	}
+	
+	// ==================================================================
+	// ==== AIDA Plots ==================================================
+	// ==================================================================
+	IHistogram2D aClusterDistribution;
+	IHistogram1D aClusterHitCount;
+	IHistogram1D aClusterSeedEnergy;
+	IHistogram1D aClusterTotalEnergy;
+	IHistogram2D clusterDistribution;
+	IHistogram1D clusterHitCount;
+	IHistogram1D clusterSeedEnergy;
+	IHistogram1D clusterTotalEnergy;
+	IHistogram1D invariantMass;
+	IHistogram1D pairEnergySum;
+	IHistogram1D pClusterHitCount;
+	IHistogram2D pClusterDistribution;
+	IHistogram1D pClusterSeedEnergy;
+	IHistogram1D pClusterTotalEnergy;
+	IHistogram1D pPairEnergySum;
+	IHistogram1D pInvariantMass;
+	IHistogram1D seedPercent;
+	
+	// ==================================================================
+	// ==== Variables ===================================================
+	// ==================================================================
+	
+	/**
+	 * <b>aida</b><br/><br/>
+	 * <code>private AIDA <b>aida</b></code><br/><br/>
+	 * Factory for generating histograms.
+	 */
+	private AIDA aida = AIDA.defaultInstance();
+	
+	/**
+	 * <b>clusterBuffer</b><br/><br/>
+	 * <code>private LinkedList<List<HPSEcalCluster>> <b>clusterBuffer</b></code><br/><br/>
+	 * Stores the list of clusters from each event for a finite-sized
+	 * buffer. The size of the buffer is determined by the coincidence
+	 * window.
+	 */
+	private LinkedList<List<HPSEcalCluster>> clusterBuffer;
+	
+	/**
+	 * <b>clusterCollectionName</b><br/><br/>
+	 * <code>private String <b>clusterCollectionName</b></code><br/><br/>
+	 * The name of the LCIO collection containing <code>HPSEcalCluster
+	 * </code> objects.
+	 */
+	private String clusterCollectionName = "EcalClusters";
+	
+	/**
+	 * <b>clusterPair</b><br/><br/>
+	 * <code>private HPSEcalCluster[] <b>clusterPair</b></code><br/><br/>
+	 * Stores the two highest energy clusters located in the cluster
+	 * buffer. These are sorted by energy, with the highest energy
+	 * cluster first in the array.
+	 */
+	private HPSEcalCluster[] clusterPair = new HPSEcalCluster[2];
+	
+	/**
+	 * <b>clusterHitCountThreshold</b><br/><br/>
+	 * <code>private int <b>clusterHitCountThreshold</b></code><br/><br/>
+	 * Defines the minimum number of hits required for a cluster to
+	 * be used in triggering.
+	 */
+	private int clusterHitCountThreshold = 5;
+	
+	/**
+	 * <b>clusterSeedEnergyThresholdLow</b><br/><br/>
+	 * <code>private double <b>clusterSeedEnergyThresholdLow</b></code><br/><br/>
+	 * Defines the threshold for the cluster seed energy under which
+	 * a cluster will be rejected.
+	 */
+	private double clusterSeedEnergyThresholdLow = 0.15;
+	
+	/**
+	 * <b>clusterSeedEnergyThresholdHigh</b><br/><br/>
+	 * <code>private double <b>clusterSeedEnergyThresholdHigh</b></code><br/><br/>
+	 * Defines the threshold for the cluster seed energy above which
+	 * a cluster will be rejected.
+	 */
+	private double clusterSeedEnergyThresholdHigh = 1.00;
+	
+	/**
+	 * <b>clusterTotalEnergyThresholdLow</b><br/><br/>
+	 * <code>private double <b>clusterTotalEnergyThresholdLow</b></code><br/><br/>
+	 * Defines the threshold for the total cluster energy under which
+	 * a cluster will be rejected.
+	 */
+	private double clusterTotalEnergyThresholdLow = 0.0;
+	
+	/**
+	 * <b>clusterTotalEnergyThresholdHigh</b><br/><br/>
+	 * <code>private double <b>clusterTotalEnergyThresholdHigh</b></code><br/><br/>
+	 * Defines the threshold for the total cluster energy above which
+	 * a cluster will be rejected.
+	 */
+	private double clusterTotalEnergyThresholdHigh = Double.MAX_VALUE;
+	
+	/**
+	 * <b>clusterTriplet</b><br/><br/>
+	 * <code>private HPSEcalCluster[] <b>clusterTriplet</b></code><br/><br/>
+	 * Stores the three highest energy clusters located in the cluster
+	 * buffer. These are sorted by energy, with the highest energy
+	 * cluster first in the array.
+	 */
+	private HPSEcalCluster[] clusterTriplet = new HPSEcalCluster[3]; 
+	
+	/**
+	 * <b>coincidenceWindow</b><br/><br/>
+	 * <code>private int <b>coincidenceWindow</b></code><br/><br/>
+	 * The number of events for which clusters will be retained and
+	 * used in the trigger before they are removed.
+	 */
+	private int coincidenceWindow = 3;
+	
+	/**
+	 * <b>D2</b><br/><br/>
+	 * <code>private static final double <b>D2</b></code><br/><br/>
+	 * The squared distance of the calorimeter from the target.
+	 */
+	private static final double D2 = 1414 * 1414; // (1414^2 mm^2)
+	
+	/**
+	 * <b>invariantMassThresholdHigh</b><br/><br/>
+	 * <code>private double <b>invariantMassThresholdHigh</b></code><br/><br/>
+	 * Defines the threshold for the invariant mass of the generating
+	 * particle above which the cluster pair will be rejected.
+	 */
+	private double invariantMassThresholdHigh = 0.01472;
+	
+	/**
+	 * <b>invariantMassThresholdLow</b><br/><br/>
[truncated at 1000 lines; 84 more skipped]

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
ReadoutTimestamp.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/ReadoutTimestamp.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/ReadoutTimestamp.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,109 @@
+package org.hps.readout.ecal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
+
+/**
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: ReadoutTimestamp.java,v 1.1 2013/03/20 00:09:42 meeg Exp $
+ */
+public class ReadoutTimestamp implements GenericObject {
+
+    public static final String collectionName = "ReadoutTimestamps";
+    public static final int SYSTEM_TRIGGERBITS = 0;
+    public static final int SYSTEM_TRACKER = 1;
+    public static final int SYSTEM_ECAL = 2;
+    public static final int SYSTEM_TRIGGERTIME = 3;
+    private int system;
+    private double time;
+
+    public ReadoutTimestamp(int system, double time) {
+        this.system = system;
+        this.time = time;
+    }
+
+    public static void addTimestamp(TriggerableDriver triggerable, EventHeader event) {
+        ReadoutTimestamp timestamp = new ReadoutTimestamp(triggerable.getTimestampType(), triggerable.readoutDeltaT());
+        /*
+        if (TriggerDriver.class.isInstance(triggerable)) {
+            timestamp = new ReadoutTimestamp(SYSTEM_TRIGGER, triggerable.readoutDeltaT());
+        } else if (SimpleSvtReadout.class.isInstance(triggerable)) {
+            timestamp = new ReadoutTimestamp(SYSTEM_TRACKER, triggerable.readoutDeltaT());
+        } else if (EcalReadoutDriver.class.isInstance(triggerable)) {
+            timestamp = new ReadoutTimestamp(SYSTEM_ECAL, triggerable.readoutDeltaT());
+        }*/        
+        addTimestamp(timestamp, event);
+    }
+
+    public static void addTimestamp(ReadoutTimestamp timestamp, EventHeader event) {
+        List<ReadoutTimestamp> timestamps;
+        if (event.hasCollection(ReadoutTimestamp.class, collectionName)) {
+            timestamps = event.get(ReadoutTimestamp.class, collectionName);
+        } else {
+            timestamps = new ArrayList<ReadoutTimestamp>();
+            event.put(collectionName, timestamps, ReadoutTimestamp.class, 0);
+        }
+        timestamps.add(timestamp);
+    }
+
+    public static double getTimestamp(int system, EventHeader event) {
+        if (event.hasCollection(GenericObject.class, collectionName)) {
+            List<GenericObject> timestamps = event.get(GenericObject.class, collectionName);
+            for (GenericObject timestamp : timestamps) {
+                if (timestamp.getIntVal(0) == system) {
+                    return timestamp.getDoubleVal(0);
+                }
+            }
+            return 0;
+        } else {
+            return 0;
+        }
+    }
+
+    @Override
+    public int getNInt() {
+        return 1;
+    }
+
+    @Override
+    public int getNFloat() {
+        return 0;
+    }
+
+    @Override
+    public int getNDouble() {
+        return 1;
+    }
+
+    @Override
+    public int getIntVal(int index) {
+        if (index == 0) {
+            return system;
+        } else {
+            throw new ArrayIndexOutOfBoundsException();
+        }
+    }
+
+    @Override
+    public float getFloatVal(int index) {
+        throw new ArrayIndexOutOfBoundsException();
+    }
+
+    @Override
+    public double getDoubleVal(int index) {
+        if (index == 0) {
+            return time;
+        } else {
+            throw new ArrayIndexOutOfBoundsException();
+        }
+    }
+
+    @Override
+    public boolean isFixedSize() {
+        return true;
+    }
+}

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
RingBuffer.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/RingBuffer.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/RingBuffer.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,55 @@
+package org.hps.readout.ecal;
+
+/**
+ * Ring buffer for storing ECal and SVT signals for trigger and readout
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: RingBuffer.java,v 1.5 2012/04/10 01:00:13 meeg Exp $
+ */
+public class RingBuffer {
+
+	protected double[] array;
+	protected int ptr;
+
+	public RingBuffer(int size) {
+		array = new double[size]; //initialized to 0
+		ptr = 0;
+	}
+
+	/**
+	 * 
+	 * @return value stored at current cell
+	 */
+	public double currentValue() {
+		return array[ptr];
+	}
+
+	//return content of specified cell (pos=0 for current cell)
+	public double getValue(int pos) {
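+		// The double modulo keeps the index in [0, length) even when pos is negative.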
+		return array[((ptr + pos) % array.length + array.length) % array.length];
+	}
+
+	/**
+	 * Clear value at current cell and step to the next one
+	 */
+	public void step() {
+		array[ptr] = 0;
+		ptr++;
+		if (ptr == array.length) {
+			ptr = 0;
+		}
+	}
+
+	/**
+	 * Add given value to specified cell
+	 * @param pos Target position relative to current cell (pos=0 for current cell)
+	 * @param val 
+	 */
+	public void addToCell(int pos, double val) {
+		array[(ptr + pos) % array.length] += val;
+	}
+
+    public int getLength() {
+        return array.length;
+    }
+}
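Editorial aside, not part of the commit: a minimal sketch of how the new RingBuffer is meant to be exercised. The class name RingBufferSketch and the buffer size are invented for illustration only.

    package org.hps.readout.ecal;

    // Illustrative sketch: deposit a signal into the current and future cells, then advance one readout tick.
    public class RingBufferSketch {
        public static void main(String[] args) {
            RingBuffer buf = new RingBuffer(8);       // eight readout cells, all initialized to 0
            buf.addToCell(0, 1.5);                    // deposit into the current cell
            buf.addToCell(3, 0.5);                    // and into the cell three ticks ahead
            System.out.println(buf.currentValue());   // prints 1.5
            buf.step();                               // clears the current cell and advances the pointer
            System.out.println(buf.getValue(2));      // prints 0.5 (that deposit is now two ticks ahead)
        }
    }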

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
SimpleEcalReadoutDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/SimpleEcalReadoutDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/SimpleEcalReadoutDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,59 @@
+package org.hps.readout.ecal;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.recon.ecal.HPSCalorimeterHit;
+import org.lcsim.event.CalorimeterHit;
+
+/**
+ * Performs readout of ECal hits.
+ * No time evolution - this just integrates all hits in a cycle.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: SimpleEcalReadoutDriver.java,v 1.1 2013/02/25 22:39:26 meeg Exp $
+ */
+public class SimpleEcalReadoutDriver extends EcalReadoutDriver<HPSCalorimeterHit> {
+    //buffer for deposited energy
+    Map<Long, Double> eDepMap = null;
+
+	public SimpleEcalReadoutDriver() {
+		hitClass = HPSCalorimeterHit.class;
+	}
+
+    @Override
+    protected void readHits(List<HPSCalorimeterHit> hits) {
+        for (Long cellID : eDepMap.keySet()) {
+//            int ix = dec.getValue("ix");
+//            int iy = dec.getValue("iy");
+//            //temporary hack to disable crystals and flip X coordinate
+//            int side = dec.getValue("side");
+//            if (iy == 1 && ix*side >= -10 && ix*side <= -2)
+//                continue;
+            if (eDepMap.get(cellID) > threshold)
+                hits.add(new HPSCalorimeterHit(eDepMap.get(cellID), readoutTime(), cellID, hitType));
+        }
+        //reset hit integration
+        eDepMap = new HashMap<Long, Double>();
+    }
+
+    @Override
+    protected void putHits(List<CalorimeterHit> hits) {
+        //fill the readout buffers
+        for (CalorimeterHit hit : hits) {
+            Double eDep = eDepMap.get(hit.getCellID());
+            if (eDep == null) {
+                eDepMap.put(hit.getCellID(), hit.getRawEnergy());
+            } else {
+                eDepMap.put(hit.getCellID(), eDep + hit.getRawEnergy());
+            }
+        }
+    }
+
+    @Override
+    protected void initReadout() {
+        //initialize buffers
+        eDepMap = new HashMap<Long, Double>();
+    }
+}

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
TestRunTriggerDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TestRunTriggerDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TestRunTriggerDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,108 @@
+package org.hps.readout.ecal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.hps.recon.ecal.HPSEcalCluster;
+import org.lcsim.event.EventHeader;
+
+/**
+ * Reads clusters and makes trigger decision using opposite quadrant criterion.
+ * Prints triggers to file if file path specified.
+ *
+ * @author Omar Moreno <[log in to unmask]>
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TestRunTriggerDriver.java,v 1.2 2013/03/20 01:21:29 meeg Exp $
+ */
+public class TestRunTriggerDriver extends TriggerDriver {
+
+    boolean triggerThisCycle = false;
+    int cycleCounter = 0;
+    private double clusterEnergyLow = 10;    //
+    int deadtimelessTriggerCount;
+    private int topBits = 0, botBits = 0;
+    protected String clusterCollectionName = "EcalClusters";
+
+    public TestRunTriggerDriver() {
+    }
+
+    @Override
+    protected void makeTriggerData(EventHeader event, String collectionName) {
+        int[] trigArray = new int[8];
+        trigArray[TriggerData.TOP_TRIG] = topBits;
+        trigArray[TriggerData.BOT_TRIG] = botBits;
+        trigArray[TriggerData.AND_TRIG] = topBits & botBits;
+        trigArray[TriggerData.OR_TRIG] = topBits | botBits;
+        TriggerData tData = new TriggerData(trigArray);
+        List<TriggerData> triggerList = new ArrayList<TriggerData>();
+        triggerList.add(tData);
+        event.put(collectionName, triggerList, TriggerData.class, 0);
+    }
+
+    public void setClusterCollectionName(String clusterCollectionName) {
+        this.clusterCollectionName = clusterCollectionName;
+    }
+
+    @Override
+    public void startOfData() {
+        super.startOfData();
+        if (clusterCollectionName == null) {
+            throw new RuntimeException("The parameter clusterCollectionName was not set!");
+        }
+
+        deadtimelessTriggerCount = 0;
+    }
+
+    @Override
+    protected boolean triggerDecision(EventHeader event) {
+        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+            cycleCounter++;
+            if (testTrigger(event.get(HPSEcalCluster.class, clusterCollectionName))) {
+                triggerThisCycle = true;
+            }
+        }
+
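+        // Make a trigger decision only every fourth clock cycle, OR-ing together any triggers seen since the last decision.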
+        if (cycleCounter % 4 == 0) {
+            boolean trigger = triggerThisCycle;
+            triggerThisCycle = false;
+            return trigger;
+        } else {
+            return false;
+        }
+    }
+
+    public boolean testTrigger(List<HPSEcalCluster> clusters) {
+        boolean trigger = false;
+
+        topBits <<= 1;
+        botBits <<= 1;
+        for (HPSEcalCluster cluster : clusters) {
+            if (cluster.getEnergy() > clusterEnergyLow) {
+                if (cluster.getPosition()[1] > 0) {
+                    topBits |= 1;
+                } else {
+                    botBits |= 1;
+                }
+                trigger = true;
+            }
+        }
+        if (trigger) {
+            deadtimelessTriggerCount++;
+        }
+        return trigger;
+    }
+
+    @Override
+    public void endOfData() {
+        if (outputStream != null) {
+            outputStream.printf("Number of cluster pairs after successive trigger conditions:\n");
+            outputStream.printf("Trigger count without dead time: %d\n", deadtimelessTriggerCount);
+            outputStream.printf("Trigger count: %d\n", numTriggers);
+            outputStream.close();
+        }
+        System.out.printf("Number of cluster pairs after successive trigger conditions:\n");
+        System.out.printf("Trigger count without dead time: %d\n", deadtimelessTriggerCount);
+        System.out.printf("Trigger count: %d\n", numTriggers);
+        super.endOfData();
+    }
+}
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
TimeEvolutionEcalReadoutDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TimeEvolutionEcalReadoutDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TimeEvolutionEcalReadoutDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,92 @@
+package org.hps.readout.ecal;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.recon.ecal.HPSCalorimeterHit;
+import org.lcsim.event.CalorimeterHit;
+
+/**
+ * Performs readout of ECal hits.
+ * Simulates time evolution of preamp output pulse.
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TimeEvolutionEcalReadoutDriver.java,v 1.1 2013/02/25 22:39:26 meeg Exp $
+ */
+public class TimeEvolutionEcalReadoutDriver extends EcalReadoutDriver<HPSCalorimeterHit> {
+
+    //buffer for deposited energy
+    Map<Long, RingBuffer> eDepMap = null;
+    //length of ring buffer (in readout cycles)
+    int bufferLength = 20;
+    //shaper time constant in ns; negative values generate square pulses of the given width
+    double t0 = 18.0;
+
+    public TimeEvolutionEcalReadoutDriver() {
+		hitClass = HPSCalorimeterHit.class;
+    }
+
+    public void setT0(double t0) {
+        this.t0 = t0;
+    }
+
+    public void setBufferLength(int bufferLength) {
+        this.bufferLength = bufferLength;
+        eDepMap = new HashMap<Long, RingBuffer>();
+    }
+
+    @Override
+    protected void readHits(List<HPSCalorimeterHit> hits) {
+        for (Long cellID : eDepMap.keySet()) {
+            RingBuffer eDepBuffer = eDepMap.get(cellID);
+            if (eDepBuffer.currentValue() > threshold) {
+//                int ix = dec.getValue("ix");
+//                int iy = dec.getValue("iy");
+//                if (iy == 1 && ix == -2)
+//                    System.out.printf("Time %f, output signal %f\n", ClockSingleton.getTime(), eDepBuffer.currentValue());
+                hits.add(new HPSCalorimeterHit(eDepBuffer.currentValue(), readoutTime(), cellID, hitType));
+            }
+            eDepBuffer.step();
+        }
+    }
+
+    @Override
+    protected void putHits(List<CalorimeterHit> hits) {
+        //fill the readout buffers
+        for (CalorimeterHit hit : hits) {
+//            int ix = dec.getValue("ix");
+//            int iy = dec.getValue("iy");
+//            if (iy == 1 && ix == -2)
+//                System.out.printf("Time %f, input hit %f)\n", ClockSingleton.getTime() + hit.getTime(), hit.getRawEnergy());
+
+            RingBuffer eDepBuffer = eDepMap.get(hit.getCellID());
+            if (eDepBuffer == null) {
+                eDepBuffer = new RingBuffer(bufferLength);
+                eDepMap.put(hit.getCellID(), eDepBuffer);
+            }
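+            // Spread this hit's energy across future readout cells, weighted by the pulse shape at each cell's readout time.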
+            for (int i = 0; i < bufferLength; i++) {
+                eDepBuffer.addToCell(i, hit.getRawEnergy() * pulseAmplitude((i + 1) * readoutPeriod + readoutTime() - (ClockSingleton.getTime() + hit.getTime())));
+            }
+        }
+    }
+
+    @Override
+    protected void initReadout() {
+        //initialize buffers
+        eDepMap = new HashMap<Long, RingBuffer>();
+    }
+
+    private double pulseAmplitude(double time) {
+        if (time < 0.0)
+            return 0.0;
+        if (t0 > 0.0) {
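+            // Unit-amplitude shaper pulse (t/t0)*exp(1 - t/t0); it peaks at t = t0 with value 1.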
+            return (time / t0) * Math.exp(1.0 - time / t0);
+        } else {
+            if (time < -t0)
+                return 1.0;
+            else
+                return 0.0;
+        }
+    }
+}

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
TriggerData.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TriggerData.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TriggerData.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,111 @@
+package org.hps.readout.ecal;
+
+import org.lcsim.event.GenericObject;
+
+/**
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TriggerData.java,v 1.3 2012/08/03 23:14:39 meeg Exp $
+ */
+public class TriggerData implements GenericObject {
+
+    public static final int OR_TRIG = 3;
+    public static final int TOP_TRIG = 4;
+    public static final int BOT_TRIG = 5;
+    public static final int AND_TRIG = 6;
+    public static final int TIME = 7;
+    public static final int TRIG_BANK_SIZE = 8;
+    public static final String TRIG_COLLECTION = "TriggerBank";
+    private int[] bank;
+
+    public TriggerData(int[] bank) {
+        this.bank = bank;
+    }
+
+    public int getTime() {
+        return getIntVal(TIME);
+    }
+
+    public int getOrTrig() {
+        return getIntVal(OR_TRIG);
+    }
+
+    public int getTopTrig() {
+        return getIntVal(TOP_TRIG);
+    }
+
+    public int getBotTrig() {
+        return getIntVal(BOT_TRIG);
+    }
+
+    public int getAndTrig() {
+        return getIntVal(AND_TRIG);
+    }
+
+    public int[] getBank() {
+        return bank;
+    }
+
+    public static int getTime(GenericObject object) {
+        return object.getIntVal(TIME);
+    }
+
+    public static int getOrTrig(GenericObject object) {
+        return object.getIntVal(OR_TRIG);
+    }
+
+    public static int getTopTrig(GenericObject object) {
+        return object.getIntVal(TOP_TRIG);
+    }
+
+    public static int getBotTrig(GenericObject object) {
+        return object.getIntVal(BOT_TRIG);
+    }
+
+    public static int getAndTrig(GenericObject object) {
+        return object.getIntVal(AND_TRIG);
+    }
+
+    public static int[] getBank(GenericObject object) {
+        int[] bank = new int[8];
+        for (int i = 0; i < 8; i++) {
+            bank[i] = object.getIntVal(i);
+        }
+        return bank;
+    }
+
+    @Override
+    public int getNInt() {
+        return TRIG_BANK_SIZE;
+    }
+
+    @Override
+    public int getNFloat() {
+        return 0;
+    }
+
+    @Override
+    public int getNDouble() {
+        return 0;
+    }
+
+    @Override
+    public int getIntVal(int index) {
+        return bank[index];
+    }
+
+    @Override
+    public float getFloatVal(int index) {
+        throw new UnsupportedOperationException("No float values in " + this.getClass().getSimpleName());
+    }
+
+    @Override
+    public double getDoubleVal(int index) {
+        throw new UnsupportedOperationException("No double values in " + this.getClass().getSimpleName());
+    }
+
+    @Override
+    public boolean isFixedSize() {
+        return true;
+    }
+}
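Editorial aside, not part of the commit: a sketch of reading the trigger bank back out of an event through the GenericObject interface. The class name TriggerBankReaderSketch is invented; the accessors used (TriggerData.getOrTrig, getAndTrig, getTopTrig, getBotTrig, getTime) are the static helpers added above.

    package org.hps.readout.ecal;

    import java.util.List;

    import org.lcsim.event.EventHeader;
    import org.lcsim.event.GenericObject;

    // Illustrative sketch: dumps the trigger bits stored in the "TriggerBank" collection.
    public class TriggerBankReaderSketch {
        public static void dump(EventHeader event) {
            if (!event.hasCollection(GenericObject.class, TriggerData.TRIG_COLLECTION)) {
                return;
            }
            List<GenericObject> bank = event.get(GenericObject.class, TriggerData.TRIG_COLLECTION);
            for (GenericObject data : bank) {
                System.out.printf("OR=%d AND=%d TOP=%d BOT=%d time=%d%n",
                        TriggerData.getOrTrig(data), TriggerData.getAndTrig(data),
                        TriggerData.getTopTrig(data), TriggerData.getBotTrig(data),
                        TriggerData.getTime(data));
            }
        }
    }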

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
TriggerDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TriggerDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TriggerDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,193 @@
+package org.hps.readout.ecal;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.lcio.LCIOWriter;
+
+/**
+ * Makes trigger decision and sends trigger to readout drivers. Prints triggers
+ * to file if file path specified. Writes trigger events to LCIO if file path
+ * specified. To implement: extend this class and write your own
+ * triggerDecision().
+ *
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TriggerDriver.java,v 1.7 2013/09/02 21:56:56 phansson Exp $
+ */
+public abstract class TriggerDriver extends TriggerableDriver {
+
+    private boolean _DEBUG = false;
+    protected String outputFileName = null;
+    protected PrintWriter outputStream = null;
+    protected int numTriggers;
+    private static int lastTrigger = Integer.MIN_VALUE;
+    private int deadTime = 0;
+    private static boolean triggerBit = false;
+    private String lcioFile = null;
+    LCIOWriter lcioWriter = null;
+    private static final List<TriggerableDriver> triggerables = new ArrayList<TriggerableDriver>();
+
+    public TriggerDriver() {
+        triggerDelay = 50.0;
+    }
+
+    public void setLcioFile(String lcioFile) {
+        this.lcioFile = lcioFile;
+    }
+
+    /**
+     * Set dead time; 0 for no dead time
+     *
+     * @param deadTime Minimum number of clock ticks between triggers
+     */
+    public void setDeadTime(int deadTime) {
+        this.deadTime = deadTime;
+    }
+
+    public void setOutputFileName(String outputFileName) {
+        this.outputFileName = outputFileName;
+    }
+
+    @Override
+    public void startOfData() {
+//        addTriggerable(this);
+
+        if (outputFileName != null) {
+            try {
+                outputStream = new PrintWriter(new PrintStream(outputFileName), true);
+            } catch (IOException ex) {
+                throw new RuntimeException("Invalid outputFilePath!");
+            }
+        } else {
+            if (_DEBUG) {
+                outputStream = new PrintWriter(System.out, true);
+            }
+        }
+
+        if (lcioFile != null) {
+            try {
+                lcioWriter = new LCIOWriter(new File(lcioFile));
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+
+        numTriggers = 0;
+    }
+
+    @Override
+    public void process(EventHeader event) {
+//        triggerBit = false; //reset trigger
+        //System.out.println(this.getClass().getCanonicalName() + " - process");
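+        // Only evaluate the trigger if the dead time since the last trigger has elapsed (or no trigger has fired yet).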
+        if ((lastTrigger == Integer.MIN_VALUE || ClockSingleton.getClock() - lastTrigger > deadTime) && triggerDecision(event)) {
+            sendTrigger();
+            this.addTrigger();
+            for (TriggerableDriver triggerable : triggerables) {
+                ReadoutTimestamp.addTimestamp(triggerable, event);
+            }
+            ReadoutTimestamp.addTimestamp(this, event);
+            triggerBit = true;
+            lastTrigger = ClockSingleton.getClock();
+            numTriggers++;
+            if (_DEBUG) {
+                System.out.printf(this.getClass().getSimpleName() + ": Trigger on event %d\n", event.getEventNumber());
+            }
+            if (outputStream != null) {
+                outputStream.printf("Trigger on event %d\n", event.getEventNumber());
+            }
+
+            // If an ECal trigger signal has been sent store the trigger
+            // time offset by the trigger latencies
+            if (_DEBUG) {
+                System.out.println(this.getClass().getSimpleName() + ": Trigger added on event " + event.getEventNumber());
+            }
+
+            if (outputStream != null) {
+                outputStream.printf("trigger sent to ET event builder on event %d\n", event.getEventNumber());
+            }
+            makeTriggerData(event, "TriggerStatus");
+            if (lcioWriter != null) {
+                try {
+                    lcioWriter.write(event);
+                } catch (IOException ex) {
+                    Logger.getLogger(TriggerDriver.class.getName()).log(Level.SEVERE, null, ex);
+                }
+            }
+        }
+
+        // Check if there are any pending trigger bank triggers to process
+        checkTrigger(event);
+    }
+
+    protected static boolean sendTrigger() {
+        for (TriggerableDriver triggerable : triggerables) {
+            if (!triggerable.isLive()) {
+                return false;
+            }
+        }
+        for (TriggerableDriver triggerable : triggerables) {
+            triggerable.addTrigger();
+        }
+        return true;
+    }
+
+    public static void addTriggerable(TriggerableDriver triggerable) {
+        triggerables.add(triggerable);
+    }
+
+    @Override
+    protected void processTrigger(EventHeader event) {
+        if (outputStream != null) {
+            outputStream.printf("Trigger bank trigger sent on event %d\n", event.getEventNumber());
+        }
+        makeTriggerData(event, "TriggerBank");
+    }
+
+    protected abstract boolean triggerDecision(EventHeader event);
+
+    /**
+     * Make a dummy TriggerData
+     */
+    protected void makeTriggerData(EventHeader event, String collectionName) {
+        TriggerData tData = new TriggerData(new int[8]);
+        List<TriggerData> triggerList = new ArrayList<TriggerData>();
+        triggerList.add(tData);
+        event.put(collectionName, triggerList, TriggerData.class, 0);
+    }
+
+    @Override
+    public void endOfData() {
+        if (outputStream != null) {
+            outputStream.printf("Trigger count: %d\n", numTriggers);
+            outputStream.close();
+        }
+        if (lcioWriter != null) {
+            try {
+                lcioWriter.close();
+            } catch (IOException ex) {
+                Logger.getLogger(TriggerDriver.class.getName()).log(Level.SEVERE, null, ex);
+            }
+        }
+        System.out.printf(this.getClass().getSimpleName() + ": Trigger count: %d\n", numTriggers);
+    }
+
+    @Deprecated
+    public static boolean triggerBit() {
+        return triggerBit;
+    }
+
+    public static void resetTrigger() {
+        triggerBit = false;
+    }
+
+    public int getTimestampType() {
+        return ReadoutTimestamp.SYSTEM_TRIGGERBITS;
+    }
+}

java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal
TriggerableDriver.java added at 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TriggerableDriver.java	                        (rev 0)
+++ java/branches/hps_java_trunk_HPSJAVA-251/ecal-recon/src/main/java/org/hps/recon/ecal/TriggerableDriver.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -0,0 +1,56 @@
+package org.hps.readout.ecal;
+
+import java.util.LinkedList;
+import java.util.Queue;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+
+/**
+ * A driver that accepts triggers from TriggerDriver.
+ * To implement, write your own processTrigger(), and call checkTrigger() somewhere in process().
+ * You might want to set your own default latency in your constructor.
+ * readoutDeltaT() and isLive() are meant to be overridden if you're doing something unusual.
+ * @author Sho Uemura <[log in to unmask]>
+ * @version $Id: TriggerableDriver.java,v 1.3 2013/03/20 01:03:32 meeg Exp $
+ */
+public abstract class TriggerableDriver extends Driver {
+
+    private Queue<Double> triggerTimestamps = new LinkedList<Double>();
+    protected double triggerDelay = 0.0; // [ns]
+
+    public void setTriggerDelay(double triggerDelay) {
+        this.triggerDelay = triggerDelay;
+    }
+
+    /**
+     * 
+     * @return time reference for hits written by this driver in response to a trigger
+     */
+    public double readoutDeltaT() {
+        return ClockSingleton.getTime() + triggerDelay;
+    }
+
+    @Override
+    protected void startOfData() {
+        TriggerDriver.addTriggerable(this);
+    }
+
+    protected abstract void processTrigger(EventHeader event);
+
+    protected void checkTrigger(EventHeader event) {
+        while (triggerTimestamps.peek() != null && ClockSingleton.getTime() >= triggerTimestamps.peek()) {
+            processTrigger(event);
+            triggerTimestamps.remove();
+        }
+    }
+
+    public void addTrigger() {
+        triggerTimestamps.add(ClockSingleton.getTime() + triggerDelay);
+    }
+
+    public boolean isLive() {
+        return true;
+    }
+    
+    public abstract int getTimestampType();
+}
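Editorial aside, not part of the commit: a minimal TriggerableDriver subclass, following the recipe in the class javadoc (override processTrigger(), call checkTrigger() from process()). The class name ExampleTriggerableDriver and the 100 ns latency are illustrative assumptions.

    package org.hps.readout.ecal;

    import org.lcsim.event.EventHeader;

    // Illustrative sketch: does no readout of its own, just reports when a delayed trigger arrives.
    public class ExampleTriggerableDriver extends TriggerableDriver {

        public ExampleTriggerableDriver() {
            triggerDelay = 100.0; // illustrative latency in ns
        }

        @Override
        public void process(EventHeader event) {
            // Drain any pending triggers whose delayed timestamps have now elapsed.
            checkTrigger(event);
        }

        @Override
        protected void processTrigger(EventHeader event) {
            System.out.println("Trigger processed; readout time reference = " + readoutDeltaT());
        }

        @Override
        public int getTimestampType() {
            return ReadoutTimestamp.SYSTEM_ECAL;
        }
    }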

java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio
ECalEvioReader.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/ECalEvioReader.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/ECalEvioReader.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -3,32 +3,18 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ConditionsDriver;
-import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannel;
-import org.hps.conditions.ecal.EcalChannel.DaqId;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.detector.identifier.Identifier;
-//import org.hps.conditions.deprecated.EcalConditions;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.jlab.coda.jevio.BaseStructure;
 import org.jlab.coda.jevio.BaseStructureHeader;
 import org.jlab.coda.jevio.CompositeData;
 import org.jlab.coda.jevio.EvioEvent;
 import org.jlab.coda.jevio.EvioException;
+import org.lcsim.detector.identifier.Identifier;
 import org.lcsim.event.EventHeader;
 import org.lcsim.event.RawTrackerHit;
 import org.lcsim.event.SimTrackerHit;
 import org.lcsim.event.base.BaseRawCalorimeterHit;
 import org.lcsim.event.base.BaseRawTrackerHit;
-import org.lcsim.geometry.Detector;
-import org.lcsim.geometry.Subdetector;
 import org.lcsim.lcio.LCIOConstants;
 
 /**
@@ -41,35 +27,9 @@
 
     private int bankTag = EventConstants.ECAL_PULSE_INTEGRAL_BANK_TAG;
     private Class hitClass = BaseRawCalorimeterHit.class;
-    
-    // FIXME: Hard-coded detector names.
-    private static String readoutName = "EcalHits";
-    private static String subdetectorName = "Ecal";
-    Detector detector;
-    Subdetector subDetector;
-    
-    static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null; 
 
     public ECalEvioReader() {
         hitCollectionName = "EcalReadoutHits";
-        
-        detector = DatabaseConditionsManager.getInstance().getDetectorObject();
-        subDetector = detector.getSubdetector(subdetectorName);
-        
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        helper = subDetector.getDetectorElement().getIdentifierHelper();
-        
-        System.out.println("You are now using the database conditions for ECalEvioReader.java");
-        // ID helper.
-//        helper = detector.getSubdetector("Ecal").getDetectorElement().getIdentifierHelper();
     }
 
     @Override
@@ -131,7 +91,7 @@
                 }
             }
         }
-//        String readoutName = ;
+        String readoutName = EcalConditions.getSubdetector().getReadout().getName();
         lcsimEvent.put(hitCollectionName, hits, hitClass, flags, readoutName);
 //        for (Object hit : hits) {
 //            System.out.println(((RawTrackerHit) hit).getIDDecoder().getIDDescription().toString());
@@ -166,44 +126,22 @@
                 if (debug) {
                     System.out.println("  channel=" + channel + "; nSamples=" + nSamples);
                 }
+                Long id = EcalConditions.daqToPhysicalID(crate, slot, channel);
 
-                
-                 long id = daqToGeometryId(crate, slot, channel);
-//                Long id = EcalConditions.daqToPhysicalID(crate, slot, channel);
-
-                 System.out.println("The long id is: " + id);
-                 
                 short[] adcValues = new short[nSamples];
                 for (int i = 0; i < nSamples; i++) {
                     adcValues[i] = cdata.getShort();
                 }
-//                if (id == null) {
-//                    System.out.printf("Crate %d, slot %d, channel %d not found in map\n", crate, slot, channel);
-//                } else {
-                    hits.add(new BaseRawTrackerHit(
-                    		0, 
-                    		id, 
-                    		adcValues, 
-                    		new ArrayList<SimTrackerHit>(), 
-                    		subDetector
-                    		    .getDetectorElement().findDetectorElement(new Identifier(id)).get(0)));
-//                }
+                if (id == null) {
+                    System.out.printf("Crate %d, slot %d, channel %d not found in map\n", crate, slot, channel);
+                } else {
+                    hits.add(new BaseRawTrackerHit(0, id, adcValues, new ArrayList<SimTrackerHit>(), EcalConditions.getSubdetector().getDetectorElement().findDetectorElement(new Identifier(id)).get(0)));
+                }
             }
         }
         return hits;
     }
 
-	private long daqToGeometryId(int crate, short slot, short channel) {
-		DaqId daqId = new DaqId(new int[]{crate,slot,channel});
-		 EcalChannel ecalChannel = channels.findChannel(daqId);
-		 if(ecalChannel == null) throw new RuntimeException("Daq Id not found.");
-		 int ix = ecalChannel.getX();
-		 int iy = ecalChannel.getY();
-		 GeometryId geometryId = new GeometryId(helper, new int[]{subDetector.getSystemID(),ix,iy});
-		 long id = geometryId.encode();
-		return id;
-	}
-
     private List<BaseRawTrackerHit> makePulseHits(CompositeData cdata, int crate) {
         List<BaseRawTrackerHit> hits = new ArrayList<BaseRawTrackerHit>();
         if (debug) {
@@ -230,7 +168,7 @@
                 if (debug) {
                     System.out.println("  channel=" + channel + "; npulses=" + npulses);
                 }
-                Long id = daqToGeometryId(crate, slot, channel);
+                Long id = EcalConditions.daqToPhysicalID(crate, slot, channel);
                 for (int k = 0; k < npulses; k++) {
                     short pulseNum = cdata.getByte();
                     int sampleCount = cdata.getNValue();
@@ -241,7 +179,7 @@
                     if (id == null) {
                         System.out.printf("Crate %d, slot %d, channel %d not found in map\n", crate, slot, channel);
                     } else {
-                        hits.add(new BaseRawTrackerHit(pulseNum, id, adcValues, new ArrayList<SimTrackerHit>(), subDetector.getDetectorElement().findDetectorElement(new Identifier(id)).get(0)));
+                        hits.add(new BaseRawTrackerHit(pulseNum, id, adcValues, new ArrayList<SimTrackerHit>(), EcalConditions.getSubdetector().getDetectorElement().findDetectorElement(new Identifier(id)).get(0)));
                     }
                 }
             }
@@ -275,7 +213,7 @@
                 if (debug) {
                     System.out.println("  channel=" + channel + "; npulses=" + npulses);
                 }
-                Long id = daqToGeometryId(crate, slot, channel);
+                Long id = EcalConditions.daqToPhysicalID(crate, slot, channel);
 
                 for (int k = 0; k < npulses; k++) {
                     short pulseTime = cdata.getShort();

java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio
ECalHitWriter.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/ECalHitWriter.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/ECalHitWriter.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -5,30 +5,17 @@
 import java.util.List;
 import java.util.Map;
 
-import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsUtil;
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-//import org.hps.conditions.deprecated.EcalConditions;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.jlab.coda.jevio.BaseStructure;
 import org.jlab.coda.jevio.CompositeData;
 import org.jlab.coda.jevio.DataType;
 import org.jlab.coda.jevio.EventBuilder;
 import org.jlab.coda.jevio.EvioBank;
 import org.jlab.coda.jevio.EvioException;
-import org.lcsim.conditions.ConditionsManager;
-import org.lcsim.detector.identifier.IIdentifier;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.detector.identifier.Identifier;
 import org.lcsim.event.EventHeader;
 import org.lcsim.event.RawCalorimeterHit;
 import org.lcsim.event.RawTrackerHit;
-import org.lcsim.geometry.Detector;
 import org.lcsim.geometry.IDDecoder;
-import org.lcsim.geometry.Subdetector;
 import org.lcsim.lcio.LCIOConstants;
 
 import static org.hps.evio.EventConstants.*;
@@ -43,38 +30,8 @@
     private String hitCollectionName = "EcalReadoutHits";
     private int mode = EventConstants.ECAL_PULSE_INTEGRAL_MODE;
 
-    // FIXME: Hard-coded detector names.
-    private static String subdetectorName = "Ecal";
-    Detector detector = null;
-    public Subdetector subDetector;
-    
-    static EcalConditions ecalConditions = null;
-    static IIdentifierHelper helper = null;
-    static EcalChannelCollection channels = null;     
-    
-    public ECalHitWriter() { 	
+    public ECalHitWriter() {
     }
-    
-    /** 
-     * Must be set when an object EcalHitWriter is created.
-     * @param detector (long)
-     */   
-    void setDetector(Detector detector) {
-    	
-        this.detector = detector;
-        subDetector = detector.getSubdetector(subdetectorName);
-        
-        // ECAL combined conditions object.
-        ecalConditions = ConditionsManager.defaultInstance()
-                .getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
-        
-        // List of channels.
-        channels = ecalConditions.getChannelCollection();
-        
-        helper = subDetector.getDetectorElement().getIdentifierHelper();
-        
-        System.out.println("You are now using the database conditions for ECalHitWriter.java");   
-    }
 
     public void setHitCollectionName(String hitCollectionName) {
         this.hitCollectionName = hitCollectionName;
@@ -129,8 +86,8 @@
         List<Object> topHits = new ArrayList<Object>();
         List<Object> bottomHits = new ArrayList<Object>();
         for (Object hit : rawCalorimeterHits) {
-//            Long daqID = EcalConditions.physicalToDaqID(getCellID(hit));
-            int crate = getCrate(getCellID(hit));
+            Long daqID = EcalConditions.physicalToDaqID(getCellID(hit));
+            int crate = EcalConditions.getCrate(daqID);
             if (crate == ECAL_BOTTOM_BANK_TAG) {
                 bottomHits.add(hit);
             } else {
@@ -193,7 +150,6 @@
 
     private long getCellID(Object hit) {
         if (RawCalorimeterHit.class.isInstance(hit)) {
-        	System.out.println("hit.getCellID() " + ((RawCalorimeterHit) hit).getCellID());
             return ((RawCalorimeterHit) hit).getCellID();
         } else if (RawTrackerHit.class.isInstance(hit)) {
             return ((RawTrackerHit) hit).getCellID();
@@ -207,7 +163,7 @@
         }
 
         // Get the ID decoder.
-        IDDecoder dec = subDetector.getIDDecoder();
+        IDDecoder dec = EcalConditions.getSubdetector().getIDDecoder();
 
         // Make a hit map; allow for multiple hits in a crystal.
         Map<Long, List<RawCalorimeterHit>> hitMap = new HashMap<Long, List<RawCalorimeterHit>>();
@@ -226,9 +182,9 @@
             dec.setID(id);
 //			System.out.println(dec.getIDDescription());
 //			System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
-//            Long daqID = EcalConditions.physicalToDaqID(id);
+            Long daqID = EcalConditions.physicalToDaqID(id);
 //			System.out.printf("physicalID %d, daqID %d\n", id, daqID);
-            int slot = getSlot(id);
+            int slot = EcalConditions.getSlot(daqID);
             if (slotMap.get(slot) == null) {
                 slotMap.put(slot, new ArrayList<Long>());
             }
@@ -249,7 +205,7 @@
             data.addN(nhits); // number of channels
             for (Long id : hitIDs) {
                 dec.setID(id);
-                int channel = getChannel(id);
+                int channel = EcalConditions.getChannel(EcalConditions.physicalToDaqID(id));
                 data.addUchar((byte) channel); // channel #
                 List<RawCalorimeterHit> channelHits = hitMap.get(id);
                 data.addN(channelHits.size()); // number of pulses
@@ -280,7 +236,7 @@
         }
 
         // Get the ID decoder.
-        IDDecoder dec = subDetector.getIDDecoder();
+        IDDecoder dec = EcalConditions.getSubdetector().getIDDecoder();
 
         // Make a hit map; allow for multiple hits in a crystal.
         Map<Long, List<RawTrackerHit>> hitMap = new HashMap<Long, List<RawTrackerHit>>();
@@ -299,9 +255,9 @@
             dec.setID(id);
 //			System.out.println(dec.getIDDescription());
 //			System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
-//            Long daqID = EcalConditions.physicalToDaqID(id);
+            Long daqID = EcalConditions.physicalToDaqID(id);
 //			System.out.printf("physicalID %d, daqID %d\n", id, daqID);
-            int slot = getSlot(id);
+            int slot = EcalConditions.getSlot(daqID);
             if (slotMap.get(slot) == null) {
                 slotMap.put(slot, new ArrayList<Long>());
             }
@@ -322,7 +278,7 @@
             data.addN(nhits); // number of channels
             for (Long id : hitIDs) {
                 dec.setID(id);
-                int channel = getChannel(id);
+                int channel = EcalConditions.getChannel(EcalConditions.physicalToDaqID(id));
                 data.addUchar((byte) channel); // channel #
                 List<RawTrackerHit> channelHits = hitMap.get(id);
                 data.addN(channelHits.size()); // number of pulses
@@ -357,7 +313,7 @@
         }
 
         // Get the ID decoder.
-        IDDecoder dec = subDetector.getIDDecoder();
+        IDDecoder dec = EcalConditions.getSubdetector().getIDDecoder();
 
         // Make a hit map; allow for multiple hits in a crystal.
         Map<Long, RawTrackerHit> hitMap = new HashMap<Long, RawTrackerHit>();
@@ -372,9 +328,9 @@
             dec.setID(id);
 //			System.out.println(dec.getIDDescription());
 //			System.out.printf("ix = %d, iy = %d\n", dec.getValue("ix"), dec.getValue("iy"));
-//            Long daqID = EcalConditions.physicalToDaqID(id);
+            Long daqID = EcalConditions.physicalToDaqID(id);
 //			System.out.printf("physicalID %d, daqID %d\n", id, daqID);
-            int slot = getSlot(id);
+            int slot = EcalConditions.getSlot(daqID);
             if (slotMap.get(slot) == null) {
                 slotMap.put(slot, new ArrayList<Long>());
             }
@@ -399,7 +355,7 @@
             data.addN(nhits); // number of channels
             for (Long id : hitIDs) {
                 dec.setID(id);
-                int channel = getChannel(id);
+                int channel = EcalConditions.getChannel(EcalConditions.physicalToDaqID(id));
                 data.addUchar((byte) channel); // channel #
                 RawTrackerHit hit = hitMap.get(id);
                 data.addN(hit.getADCValues().length); // number of samples
@@ -426,7 +382,7 @@
 
     @Override
     public void writeData(EventHeader event, EventHeader toEvent) {
-        String readoutName = ((org.lcsim.geometry.compact.Subdetector) subDetector).getReadout().getName();   
+        String readoutName = EcalConditions.getSubdetector().getReadout().getName();
         switch (mode) {
             case EventConstants.ECAL_WINDOW_MODE:
             case EventConstants.ECAL_PULSE_MODE:
@@ -445,49 +401,4 @@
                 break;
         }
     }
-    
- 
-    
-    /**
-     * Return crate number from cellID
-     * @param cellID (long)
-     * @return Crate number (int)
-     */
-    private int getCrate(long cellID) {
-        
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getCrate(helper, cellID);
-    }
-    
-    /**
-     * Return slot number from cellID
-     * @param cellID (long)
-     * @return Slot number (int)
-     */
-    private int getSlot(long cellID) {
-        EcalConditionsUtil util = new EcalConditionsUtil();
-
-        // Find the ECAL channel and return the crate number.
-        return util.getSlot(helper, cellID);         
-    }  
-    
-    private int getChannel(long cellID){
-      // Make an ID object from hit ID.
-      IIdentifier idd = new Identifier(cellID);
-    
-      // Get physical field values.
-      int system = helper.getValue(idd, "system");
-      int x = helper.getValue(idd, "ix");
-      int y = helper.getValue(idd, "iy");
-    
-      // Create an ID to search for in channel collection.
-      GeometryId geometryId = new GeometryId(helper, new int[] { system, x, y });
-            
-      // Get the channel data.
-      return channels.findChannel(geometryId).getChannelId(); 
-    
-    }
-    
 }

java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio
TestRunEvioToLcio.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunEvioToLcio.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunEvioToLcio.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -164,7 +164,6 @@
         // LCSim job manager.
         JobControlManager jobManager = new JobControlManager();
 
-        int conditionsRunNumber = 0;
         if (cl.hasOption("D")) {
             String[] steeringOptions = cl.getOptionValues("D");
             for (String def : steeringOptions) {
@@ -175,20 +174,12 @@
                 String key = s[0];
                 String value = s[1];
                 jobManager.addVariableDefinition(key, value);
-
-                if (key.equals("runNumber")) {
-                    conditionsRunNumber = Integer.parseInt(value);
-                }
             }
         }
 
         jobManager.setup(steeringStream);
         jobManager.configure();
 
-
-        // HACK: Try to get working with new database conditions system...
-        new org.hps.conditions.config.TestRunReadOnlyConfiguration(false).setup().load(detectorName, conditionsRunNumber);
-
         // LCSim event builder.
         LCSimEventBuilder eventBuilder = new LCSimTestRunEventBuilder();
         eventBuilder.setDetectorName(detectorName);
@@ -315,4 +306,4 @@
             System.out.println("closed writer");
         }
     }
-}
+}
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio
TestRunReconToEvio.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunReconToEvio.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunReconToEvio.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -2,16 +2,12 @@
 
 import java.io.IOException;
 
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.ecal.EcalConditions;
 import org.jlab.coda.jevio.DataType;
 import org.jlab.coda.jevio.EventBuilder;
 import org.jlab.coda.jevio.EventWriter;
 import org.jlab.coda.jevio.EvioBank;
 import org.jlab.coda.jevio.EvioException;
-import org.lcsim.conditions.ConditionsManager;
 import org.lcsim.event.EventHeader;
-import org.lcsim.geometry.Detector;
 import org.lcsim.util.Driver;
 
 /**
@@ -30,17 +26,9 @@
 	private int eventsWritten = 0;
 	ECalHitWriter ecalWriter = null;
 	SVTHitWriter svtWriter = null;
-	
-	Detector detector = null;
 
 	public TestRunReconToEvio() {
 	}
-	
-    @Override
-    public void detectorChanged(Detector detector) {    	
-    	// set the detector
-        this.detector = detector;
-    }
 
 	public void setEvioOutputFile(String evioOutputFile) {
 		this.evioOutputFile = evioOutputFile;
@@ -61,7 +49,6 @@
 		}
 
 		ecalWriter = new ECalHitWriter();
-		ecalWriter.setDetector(detector);
 		ecalWriter.setHitCollectionName(rawCalorimeterHitCollectionName);
 
 		svtWriter = new SVTHitWriter();

java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio
TestRunTriggeredReconToEvio.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunTriggeredReconToEvio.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunTriggeredReconToEvio.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -5,9 +5,8 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
-import org.hps.conditions.DatabaseConditionsManager;
 import org.hps.conditions.deprecated.CalibrationDriver;
-import org.hps.conditions.ecal.EcalConditions;
+import org.hps.conditions.deprecated.EcalConditions;
 import org.hps.readout.ecal.ReadoutTimestamp;
 import org.hps.readout.ecal.TriggerDriver;
 import org.hps.readout.ecal.TriggerableDriver;
@@ -17,7 +16,6 @@
 import org.jlab.coda.jevio.EvioBank;
 import org.jlab.coda.jevio.EvioException;
 import org.lcsim.event.EventHeader;
-import org.lcsim.geometry.Detector;
 import org.lcsim.util.Driver;
 
 /**
@@ -41,19 +39,10 @@
     TriggerDataWriter triggerWriter = null;
     List<HitWriter> writers = null;
     private int ecalMode = EventConstants.ECAL_PULSE_INTEGRAL_MODE;
-    
-    Detector detector;
 
     public TestRunTriggeredReconToEvio() {
         setTriggerDelay(0);
     }
-    
-    @Override
-    public void detectorChanged(Detector detector) {    	
-    	//ecalWriter.setDetector(detector);
-        if(detector == null) System.out.println("detectorChanged, Detector == null");
-        else System.out.println("detectorChanged, Detector != null");
-    }
 
     public void setEcalMode(int ecalMode) {
         this.ecalMode = ecalMode;
@@ -86,17 +75,12 @@
         }
 
         writePrestartEvent();
-        this.detector = DatabaseConditionsManager.getInstance().getDetectorObject();
 
         writers = new ArrayList<HitWriter>();
 
         ecalWriter = new ECalHitWriter();
-        if(detector == null) System.out.println("Detector == null");
-        else System.out.println("Detector != null");
-        //ecalWriter.setDetector(detector);
         ecalWriter.setMode(ecalMode);
         ecalWriter.setHitCollectionName(rawCalorimeterHitCollectionName);
-        ecalWriter.setDetector(detector);
         writers.add(ecalWriter);
 
         svtWriter = new SVTHitWriter();

java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio
TestRunTriggeredReconToLcio.java 1009 -> 1010
--- java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunTriggeredReconToLcio.java	2014-09-11 22:07:20 UTC (rev 1009)
+++ java/branches/hps_java_trunk_HPSJAVA-251/evio/src/main/java/org/hps/evio/TestRunTriggeredReconToLcio.java	2014-09-12 02:08:45 UTC (rev 1010)
@@ -1,19 +1,18 @@
 package org.hps.evio;
 
 import hep.physics.event.generator.MCEvent;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
-
 import org.hps.conditions.deprecated.CalibrationDriver;
 import org.hps.conditions.deprecated.QuietBaseLCSimEvent;
 import org.hps.readout.ecal.ClockSingleton;
 import org.hps.readout.ecal.ReadoutTimestamp;
 import org.hps.readout.ecal.TriggerDriver;
+import org.hps.readout.ecal.TriggerableDriver;
 import org.lcsim.event.EventHeader;
 import org.lcsim.event.LCRelation;
 import org.lcsim.event.MCParticle;
@@ -28,8 +27,10 @@
  * the test run.
  *
  * @author Jeremy McCormick <[log in to unmask]>
+ * @version $Id: TestRunTriggeredReconToLcio.java 779 2014-07-16 16:24:34Z
+ * omoreno $
  */
-public class TestRunTriggeredReconToLcio extends Driver {
+public class TestRunTriggeredReconToLcio extends TriggerableDriver {
 
     String rawCalorimeterHitCollectionName = "EcalReadoutHits";
     String outputFile = "TestRunData.slcio";
@@ -49,15 +50,19 @@
     List<MCParticle> mcParticles = null;
     List<SimTrackerHit> trackerHits = null;
     List<SimCalorimeterHit> ecalHits = null;
+    List<SimTrackerHit> ecalScoringPlaneHits = null;
     //MC collections from the last 500n'th event (trident or preselected trigger event)
     List<MCParticle> triggerMCParticles = null;
     List<SimTrackerHit> triggerTrackerHits = null;
     List<SimCalorimeterHit> triggerECalHits = null;
+    List<SimTrackerHit> triggerECalScoringPlaneHits = null;
     static final String ecalCollectionName = "EcalHits";
     static final String trackerCollectionName = "TrackerHits";
     private String relationCollectionName = "SVTTrueHitRelations";
+    String ecalScoringPlaneHitsCollectionName = "TrackerHitsECal";
 
     public TestRunTriggeredReconToLcio() {
+        setTriggerDelay(0);
     }
 
     public void setEcalMode(int ecalMode) {
@@ -91,6 +96,7 @@
 
     @Override
     protected void startOfData() {
+        super.startOfData();
         writers = new ArrayList<HitWriter>();
 
         ecalWriter = new ECalHitWriter();
@@ -128,39 +134,28 @@
             mcParticles = event.getMCParticles();
             ecalHits = event.getSimCalorimeterHits(ecalCollectionName);
             trackerHits = event.getSimTrackerHits(trackerCollectionName);
+            if (event.hasCollection(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName)) {
+                ecalScoringPlaneHits = event.get(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName);
+            }
         }
         if (ClockSingleton.getClock() % triggerSpacing == 0) {
             if (event.hasCollection(MCParticle.class)) {
                 triggerMCParticles = event.getMCParticles();
                 triggerECalHits = event.getSimCalorimeterHits(ecalCollectionName);
                 triggerTrackerHits = event.getSimTrackerHits(trackerCollectionName);
+                if (event.hasCollection(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName)) {
+                    triggerECalScoringPlaneHits = event.get(SimTrackerHit.class, ecalScoringPlaneHitsCollectionName);
+                }
             } else {
                 triggerMCParticles = null;
                 triggerECalHits = null;
                 triggerTrackerHits = null;
+                triggerECalScoringPlaneHits = null;
             }
         }
 
+        checkTrigger(event);
 
-        if (TriggerDriver.triggerBit()) {
-            EventHeader lcsimEvent = new QuietBaseLCSimEvent(CalibrationDriver.runNumber(), event.getEventNumber(), event.getDetectorName());
-            events.add(lcsimEvent);
-            System.out.println("Creating LCIO event " + eventNum);
-            if (triggerMCParticles == null || triggerMCParticles.isEmpty()) {
-                lcsimEvent.put(MCEvent.MC_PARTICLES, mcParticles);
-                lcsimEvent.put(ecalCollectionName, ecalHits, SimCalorimeterHit.class, 0xe0000000);
-                lcsimEvent.put(trackerCollectionName, trackerHits, SimTrackerHit.class, 0xc0000000);
-                System.out.println("Adding " + mcParticles.size() + " MCParticles, " + ecalHits.size() + " SimCalorimeterHits, " + trackerHits.size() + " SimTrackerHits");
-            } else {
-                lcsimEvent.put(MCEvent.MC_PARTICLES, triggerMCParticles);
-                lcsimEvent.put(ecalCollectionName, triggerECalHits, SimCalorimeterHit.class, 0xe0000000);
-                lcsimEvent.put(trackerCollectionName, triggerTrackerHits, SimTrackerHit.class, 0xc0000000);
-                System.out.println("Adding " + triggerMCParticles.size() + " MCParticles, " + triggerECalHits.size() + " SimCalorimeterHits, " + triggerTrackerHits.size() + " SimTrackerHits");
-            }
-            lcsimEvent.put(ReadoutTimestamp.collectionName, event.get(ReadoutTimestamp.class, ReadoutTimestamp.collectionName));
-            ++eventNum;
-        }
-
         writerLoop:
         for (HitWriter hitWriter : writers) {
             if (hitWriter.hasData(event)) {
@@ -215,4 +210,37 @@
             }
         }
     }
-}
\ No newline at end of file
+
+    @Override
+    protected void processTrigger(EventHeader event) {
+        EventHeader lcsimEvent = new QuietBaseLCSimEvent(CalibrationDriver.runNumber(), event.getEventNumber(), event.getDetectorName());
+        events.add(lcsimEvent);
+        System.out.println("Creating LCIO event " + eventNum);
+        if (triggerMCParticles == null || triggerMCParticles.isEmpty()) {
+            lcsimEvent.put(MCEvent.MC_PARTICLES, mcParticles);
+            lcsimEvent.put(ecalCollectionName, ecalHits, SimCalorimeterHit.class, 0xe0000000);
+            lcsimEvent.put(trackerCollectionName, trackerHits, SimTrackerHit.class, 0xc0000000);
+            System.out.println("Adding " + mcParticles.size() + " MCParticles, " + ecalHits.size() + " SimCalorimeterHits, " + trackerHits.size() + " SimTrackerHits");
+            if (ecalScoringPlaneHits != null) {
+                lcsimEvent.put(ecalScoringPlaneHitsCollectionName, ecalScoringPlaneHits, SimTrackerHit.class, 0);
+                System.out.println("Adding " + ecalScoringPlaneHits.size() + " ECalTrackerHits");
+            }
+        } else {
+            lcsimEvent.put(MCEvent.MC_PARTICLES, triggerMCParticles);
+            lcsimEvent.put(ecalCollectionName, triggerECalHits, SimCalorimeterHit.class, 0xe0000000);
+            lcsimEvent.put(trackerCollectionName, triggerTrackerHits, SimTrackerHit.class, 0xc0000000);
+            System.out.println("Adding " + triggerMCParticles.size() + " MCParticles, " + triggerECalHits.size() + " SimCalorimeterHits, " + triggerTrackerHits.size() + " SimTrackerHits");
+            if (triggerECalScoringPlaneHits != null) {
+                lcsimEvent.put(ecalScoringPlaneHitsCollectionName, triggerECalScoringPlaneHits, SimTrackerHit.class, 0);
+                System.out.println("Adding " + triggerECalScoringPlaneHits.size() + " ECalTrackerHits");
+            }
+        }
+        lcsimEvent.put(ReadoutTimestamp.collectionName, event.get(ReadoutTimestamp.class, ReadoutTimestamp.collectionName));
+        ++eventNum;
+    }
+
+    @Override
+    public int getTimestampType() {
+        return ReadoutTimestamp.SYSTEM_TRIGGERTIME;
+    }
+}