Commit in java/branches/hps_java_trunk_HPSJAVA-255 on MAIN
analysis/pom.xml  +1 -6  1243 -> 1244
analysis/src/main/java/org/hps/analysis/examples/TrackAnalysis.java  +40 -67  1243 -> 1244
conditions/pom.xml  +1 -6  1243 -> 1244
conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java  +15 -42  1243 -> 1244
                                               /SvtDetectorSetup.java  +1 -17  1243 -> 1244
conditions/src/main/resources/org/hps/conditions/config/conditions_dev.xml  +32 -32  1243 -> 1244
conditions/src/test/java/org/hps/conditions/ConditionsDriverTest.java  +1 -1  1243 -> 1244
conditions/src/test/java/org/hps/conditions/beam/BeamCurrentTest.java  +1 -1  1243 -> 1244
conditions/src/test/java/org/hps/conditions/ecal/PhysicalToGainTest.java  +4 -1  1243 -> 1244
datacat/pom.xml  +3 -8  1243 -> 1244
detector-data/pom.xml  +1 -6  1243 -> 1244
distribution/pom.xml  +15 -10  1243 -> 1244
ecal-readout-sim/pom.xml  +1 -6  1243 -> 1244
ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java  +388 -378  1243 -> 1244
                                                   /NeutralPionTriggerDriver.java  +1027 -1027  1243 -> 1244
ecal-recon/pom.xml  +14 -11  1243 -> 1244
ecal-recon/src/main/java/org/hps/recon/ecal/EcalClusterIC.java  +246 -218  1243 -> 1244
                                           /HPSEcalClusterIC.java  +162 -25  1243 -> 1244
evio/pom.xml  +1 -1  1243 -> 1244
integration-tests/pom.xml  +35 -6  1243 -> 1244
integration-tests/src/test/java/org/hps/EcalReadoutSimTest.java  +4 -3  1243 -> 1244
                                       /MCReconTest.java  -123  1243 removed
                                       /ReadoutNoPileupTest.java  +4 -1  1243 -> 1244
                                       /ReadoutToEvioTest.java  +3  1243 -> 1244
                                       /ReadoutToLcioTest.java  +4 -1  1243 -> 1244
                                       /TestRunReadoutToEvioTest.java  +4 -1  1243 -> 1244
monitoring-app/pom.xml  +1 -1  1243 -> 1244
monitoring-app/src/main/java/org/hps/monitoring/gui/JobSettingsPanel.java  +1 -50  1243 -> 1244
                                                    /MonitoringApplication.java  +49 -53  1243 -> 1244
                                                    /PlotInfoWindow.java  +168 -35  1243 -> 1244
monitoring-app/src/main/java/org/hps/monitoring/plotting/MonitoringAnalysisFactory.java  -12  1243 -> 1244
                                                         /MonitoringPlotFactory.java  +2 -2  1243 -> 1244
monitoring-drivers/pom.xml  +1 -1  1243 -> 1244
monitoring-drivers/src/main/java/org/hps/monitoring/drivers/example/SimplePlotDriver.java  -46  1243 removed
parent/pom.xml  +9  1243 -> 1244
plugin/pom.xml  +17 -3  1243 -> 1244
plugin/src/main/java/org/hps/plugin/HPSPlugin.java  +1 -1  1243 -> 1244
pom.xml  +22 -54  1243 -> 1244
recon/pom.xml  +1 -1  1243 -> 1244
recon/src/main/java/org/hps/recon/particle/HpsReconParticleDriver.java  +7 -7  1243 -> 1244
                                          /ReconParticleDriver.java  +90 -51  1243 -> 1244
record-util/pom.xml  +1 -1  1243 -> 1244
record-util/src/main/java/org/hps/record/AbstractRecordQueue.java  +1 -1  1243 -> 1244
record-util/src/main/java/org/hps/record/composite/CompositeLoop.java  +14 -21  1243 -> 1244
                                                   /CompositeLoopAdapter.java  +1  1243 -> 1244
                                                   /CompositeLoopConfiguration.java  +1  1243 -> 1244
                                                   /EventProcessingThread.java  +21 -23  1243 -> 1244
steering-files/pom.xml  +1 -5  1243 -> 1244
steering-files/src/main/resources/org/hps/steering/monitoring/ExampleEcalMonitoringPlots.lcsim  -37  1243 removed
                                                              /ExampleMonitoringPlots.lcsim  +9 -3  1243 -> 1244
steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineNoPileupRecon.lcsim  +4 -2  1243 -> 1244
                                                         /HPS2014OfflineRecon.lcsim  +3 -2  1243 -> 1244
                                                         /HPS2014OfflineTruthRecon.lcsim  +4 -2  1243 -> 1244
steering-files/src/main/resources/org/hps/steering/users/mgraham/DataQualityMonitor.lcsim  +33 -12  1243 -> 1244
                                                                 /DataQualityMonitorOnRecon.lcsim  +7 -1  1243 -> 1244
steering-files/src/main/resources/org/hps/steering/users/phansson/HPSTrackingDefaults.lcsim  +38 -7  1243 -> 1244
tracking/pom.xml  +2 -12  1243 -> 1244
tracking/src/main/java/org/hps/recon/tracking/MaterialSupervisor.java  +127 -65  1243 -> 1244
                                              /TrackerDigiDriver.java  +4 -2  1243 -> 1244
tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblFitter.java  +11 -5  1243 -> 1244
                                                  /HpsGblRefitter.java  +4 -4  1243 -> 1244
                                                  /MilleBinary.java  +144 -1  1243 -> 1244
tracking/src/main/java/org/hps/recon/tracking/nobfield/StraightTrack.java  +26 -23  1243 -> 1244
                                                       /StraightTrackFinder.java  +129 -61  1243 -> 1244
                                                       /TrackChecker.java  +5  1243 -> 1244
users/pom.xml  +2 -1  1243 -> 1244
users/src/main/java/org/hps/users/celentan/StripChartTest.java  +1 -1  1243 -> 1244
users/src/main/java/org/hps/users/jeremym/TestRunReconDriver.java  +5 -2  1243 -> 1244
users/src/main/java/org/hps/users/luca/FEETrigger.java  +5 -5  1243 -> 1244
                                       /ReconDataPos.java  +29 -4  1243 -> 1244
                                       /TriggerAna.java  +18 -110  1243 -> 1244
                                       /mycluster3.java  +21 -25  1243 -> 1244
users/src/main/java/org/hps/users/sfegan/HPSECalTestFegan.java  +1 -1  1243 -> 1244
util/pom.xml  +1 -11  1243 -> 1244
+3050 -2765
3 removed + 71 modified, total 74 files
Merge trunk changes through r1243 into hps_java_trunk_HPSJAVA-255.

java/branches/hps_java_trunk_HPSJAVA-255/analysis
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/analysis/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/analysis/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,29 +1,24 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-analysis</artifactId>
     <name>analysis</name>
-    <description>HPS analysis code</description>
-    
+    <description>common analysis code</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>
-    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/analysis/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/analysis/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/analysis/</developerConnection>
     </scm>
-    
     <dependencies>
         <dependency>
             <groupId>org.hps</groupId>
             <artifactId>hps-recon</artifactId>
         </dependency>
     </dependencies>
-    
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/analysis/src/main/java/org/hps/analysis/examples
TrackAnalysis.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/analysis/src/main/java/org/hps/analysis/examples/TrackAnalysis.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/analysis/src/main/java/org/hps/analysis/examples/TrackAnalysis.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -35,6 +35,7 @@
 import org.lcsim.event.Track;
 import org.lcsim.event.TrackerHit;
 import org.lcsim.fit.helicaltrack.HelicalTrack2DHit;
+import org.lcsim.fit.helicaltrack.HelicalTrack3DHit;
 import org.lcsim.fit.helicaltrack.HelicalTrackCross;
 import org.lcsim.fit.helicaltrack.HelicalTrackHit;
 import org.lcsim.fit.helicaltrack.HelicalTrackStrip;
@@ -96,25 +97,26 @@
         _hasLayerOne = false;
         //  Loop over the hits on the track and make sure we have HelicalTrackHits (which contain the MC particle)
         for (TrackerHit hit : trk.getTrackerHits()) {
+            HelicalTrackHit htc = (HelicalTrackHit) hit;
+            if (htc.Detector().equals("BeamSpot"))
+                continue;
+
             //  get the set of MCParticles associated with this hit and update the hit count for each MCParticle
             Set<MCParticle> mclist = hittomc.allFrom(hit);
             for (MCParticle mcp : mclist) {
                 Integer mchits = 0;
-                if (mcmap.containsKey(mcp)) {
+                if (mcmap.containsKey(mcp))
                     mchits = mcmap.get(mcp);
-                }
                 mchits++;
                 mcmap.put(mcp, mchits);
             }
 
-//            HelicalTrackHit htc = (HelicalTrackHit) hit;
-            if (hit instanceof HelicalTrackCross) {
+            if (hit instanceof HelicalTrackCross)
                 countHit((HelicalTrackCross) hit);
-            } else if (hit instanceof HelicalTrack2DHit) {
+            else if (hit instanceof HelicalTrack2DHit)
                 countHit((HelicalTrack2DHit) hit);
-            } else {
+            else
                 countHit(hit, rthtosimhit, hittostrip, hittorotated);
-            }
         }
 
         //  Find the MCParticle that has the most hits on the track
@@ -128,9 +130,8 @@
             }
         }
 
-        if (nbest > 0) {
+        if (nbest > 0)
             _mcp = mcbest;
-        }
         _purity = (double) nbest / (double) _nhits;
         _nbadhits = _nhits - nbest;
 
@@ -145,32 +146,25 @@
             }
         }
 
-        if (nbestAll > 0) {
+        if (nbestAll > 0)
             _mcpNew = mcbestAll;
-        }
         _purityNew = (double) nbestAll / (double) _nhitsNew;
         _nbadhitsNew = _nhitsNew - nbestAll;
 
-        for (TrackerHit hit : trk.getTrackerHits()) {
-            if (hit instanceof HelicalTrackCross) {
+        for (TrackerHit hit : trk.getTrackerHits())
+            if (hit instanceof HelicalTrackCross)
                 checkForBadHit((HelicalTrackCross) hit);
-            }
-        }
 
-        if (_nAxialhits > 0) {
-            if (mcmapAxial.containsKey(_mcpNew)) {
+        if (_nAxialhits > 0)
+            if (mcmapAxial.containsKey(_mcpNew))
                 _nbadAxialhits = _nAxialhits - mcmapAxial.get(_mcpNew);
-            } else {
+            else
                 _nbadAxialhits = _nAxialhits;
-            }
-        }
-        if (_nZhits > 0) {
-            if (mcmapZ.containsKey(_mcpNew)) {
+        if (_nZhits > 0)
+            if (mcmapZ.containsKey(_mcpNew))
                 _nbadZhits = _nZhits - mcmapZ.get(_mcpNew);
-            } else {
+            else
                 _nbadZhits = _nZhits;
-            }
-        }
     }
 
     private void countHit(HelicalTrackCross cross) {
@@ -178,9 +172,8 @@
 
         for (HelicalTrackStrip cl : clusterlist) {
             int layer = cl.layer();
-            if (layer == 1) {
+            if (layer == 1)
                 _hasLayerOne = true;
-            }
 
             _nStripHitsPerLayer[layer - 1] = cl.rawhits().size();
             _hitLocationPerLayer.put(layer, clusterPosition(cl));
@@ -191,30 +184,26 @@
             if (axdotu > 0.5) {
                 isAxial = true;
                 _nAxialhits++;
-            } else {
+            } else
                 _nZhits++;
-            }
             List<MCParticle> mcPartList = cl.MCParticles();
             _nMCHitsPerLayer[layer - 1] = mcPartList.size();
             for (MCParticle mcp : mcPartList) {
                 Integer mchits = 0;
-                if (mcmapAll.containsKey(mcp)) {
+                if (mcmapAll.containsKey(mcp))
                     mchits = mcmapAll.get(mcp);
-                }
                 mchits++;
                 mcmapAll.put(mcp, mchits);
                 if (isAxial) {
                     Integer mchitsAxial = 0;
-                    if (mcmapAxial.containsKey(mcp)) {
+                    if (mcmapAxial.containsKey(mcp))
                         mchitsAxial = mcmapAxial.get(mcp);
-                    }
                     mchitsAxial++;
                     mcmapAxial.put(mcp, mchitsAxial);
                 } else {
                     Integer mchitsZ = 0;
-                    if (mcmapZ.containsKey(mcp)) {
+                    if (mcmapZ.containsKey(mcp))
                         mchitsZ = mcmapZ.get(mcp);
-                    }
                     mchitsZ++;
                     mcmapZ.put(mcp, mchitsZ);
                 }
@@ -236,21 +225,18 @@
 //                    System.out.println(rawHit.getCellID());
                 IIdentifier id = new Identifier(rawHit.getCellID());
                 int newLayer = SvtUtils.getInstance().getHelper().getValue(id, "layer");
-                if (layer != -1 && layer != newLayer) {
+                if (layer != -1 && layer != newLayer)
                     System.out.format("TrackerHit has hits from multiple layers: %d and %d\n", layer, newLayer);
-                }
                 layer = newLayer;
                 int newModule = SvtUtils.getInstance().getHelper().getValue(id, "module");
-                if (module != -1 && module != newModule) {
+                if (module != -1 && module != newModule)
                     System.out.format("TrackerHit has hits from multiple modules: %d and %d\n", module, newModule);
-                }
                 module = newModule;
 //                    System.out.println(SvtUtils.getInstance().getHelper().getValue(id, "strip"));
             }
 
-            if (layer == 1) {
+            if (layer == 1)
                 _hasLayerOne = true;
-            }
             DiagonalizedCovarianceMatrix covariance = new DiagonalizedCovarianceMatrix(cl);
             _nStripHitsPerLayer[layer - 1] = cl.getRawHits().size();
             _hitLocationPerLayer.put(layer, new BasicHep3Vector(hit.getPosition()));
@@ -262,41 +248,35 @@
             if (axdotu > 0.5) {
                 isAxial = true;
                 _nAxialhits++;
-            } else {
+            } else
                 _nZhits++;
-            }
             //  get the set of MCParticles associated with this hit and update the hit count for each MCParticle
 
             Set<MCParticle> mcPartList = new HashSet<MCParticle>();
             for (RawTrackerHit rawHit : rawHits) {
                 Set<SimTrackerHit> simhits = (Set<SimTrackerHit>) rthtosimhit.allFrom(rawHit);
-                for (SimTrackerHit simhit : simhits) {
-                    if (simhit != null && simhit.getMCParticle() != null) {
+                for (SimTrackerHit simhit : simhits)
+                    if (simhit != null && simhit.getMCParticle() != null)
                         mcPartList.add(simhit.getMCParticle());
-                    }
-                }
             }
 //            System.out.println("MCParticle count: " + mcPartList.size());
             _nMCHitsPerLayer[layer - 1] = mcPartList.size();
             for (MCParticle mcp : mcPartList) {
                 Integer mchits = 0;
-                if (mcmapAll.containsKey(mcp)) {
+                if (mcmapAll.containsKey(mcp))
                     mchits = mcmapAll.get(mcp);
-                }
                 mchits++;
                 mcmapAll.put(mcp, mchits);
                 if (isAxial) {
                     Integer mchitsAxial = 0;
-                    if (mcmapAxial.containsKey(mcp)) {
+                    if (mcmapAxial.containsKey(mcp))
                         mchitsAxial = mcmapAxial.get(mcp);
-                    }
                     mchitsAxial++;
                     mcmapAxial.put(mcp, mchitsAxial);
                 } else {
                     Integer mchitsZ = 0;
-                    if (mcmapZ.containsKey(mcp)) {
+                    if (mcmapZ.containsKey(mcp))
                         mchitsZ = mcmapZ.get(mcp);
-                    }
                     mchitsZ++;
                     mcmapZ.put(mcp, mchitsZ);
                 }
@@ -312,15 +292,13 @@
         boolean isAxial = true;
         for (MCParticle mcp : mcPartList) {
             Integer mchits = 0;
-            if (mcmapAll.containsKey(mcp)) {
+            if (mcmapAll.containsKey(mcp))
                 mchits = mcmapAll.get(mcp);
-            }
             mchits++;
             mcmapAll.put(mcp, mchits);
             Integer mchitsAxial = 0;
-            if (mcmapAxial.containsKey(mcp)) {
+            if (mcmapAxial.containsKey(mcp))
                 mchitsAxial = mcmapAxial.get(mcp);
-            }
             mchitsAxial++;
             mcmapAxial.put(mcp, mchitsAxial);
         }
@@ -334,9 +312,8 @@
                 badHitList.add(cl.layer());
                 badhits.put(_mcpNew, cross);
             }
-            if (cl.MCParticles().size() > 1) {
+            if (cl.MCParticles().size() > 1)
                 sharedHitList.add(cl.layer());
-            }
         }
     }
 
@@ -437,18 +414,14 @@
         public DiagonalizedCovarianceMatrix(TrackerHit hit) {
             SymmetricMatrix cov = new SymmetricMatrix(3, hit.getCovMatrix(), true);
             RealMatrix covMatrix = new Array2DRowRealMatrix(3, 3);
-            for (int i = 0; i < 3; i++) {
-                for (int j = 0; j < 3; j++) {
+            for (int i = 0; i < 3; i++)
+                for (int j = 0; j < 3; j++)
                     covMatrix.setEntry(i, j, cov.e(i, j));
-                }
-            }
             EigenDecomposition decomposed = new EigenDecomposition(covMatrix);
             BasicHep3Matrix localToGlobal = new BasicHep3Matrix();
-            for (int i = 0; i < 3; i++) {
-                for (int j = 0; j < 3; j++) {
+            for (int i = 0; i < 3; i++)
+                for (int j = 0; j < 3; j++)
                     localToGlobal.setElement(i, j, decomposed.getV().getEntry(i, j));
-                }
-            }
 //            SymmetricMatrix localToGlobal = decomposed.getV().operate(new ArrayRealVector(3))
             {
                 double eigenvalue = decomposed.getRealEigenvalue(0);

java/branches/hps_java_trunk_HPSJAVA-255/conditions
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/conditions/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/conditions/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -3,7 +3,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-conditions</artifactId>
     <name>conditions</name>
-    <description>HPS conditions framework</description>
+    <description>extensions to the org.lcsim conditions framework</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
@@ -35,11 +35,6 @@
     </build>
     <dependencies>
         <dependency>
-            <groupId>org.lcsim</groupId>
-            <artifactId>lcsim-event-processing</artifactId>
-            <version>${lcsimVersion}</version>
-        </dependency>
-        <dependency>
             <groupId>org.hps</groupId>
             <artifactId>hps-util</artifactId>
         </dependency>

java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/java/org/hps/conditions/svt
SvtDaqMapping.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,7 @@
 package org.hps.conditions.svt;
 
+import org.lcsim.detector.tracker.silicon.HpsSiSensor;
+
 import org.hps.conditions.AbstractConditionsObject;
 import org.hps.conditions.ConditionsObjectCollection;
 import org.hps.util.Pair;
@@ -35,45 +37,16 @@
          * @param moduleNumber The module number (needed to identify layer's 4-6)
          * @return The DAQ pair for the half and layer number or null if does not exist.
          */
-        Pair<Integer, Integer> getDaqPair(String SvtHalf, int layerNumber, int moduleNumber) {
+        Pair<Integer, Integer> getDaqPair(HpsSiSensor sensor) {
         	
+        	String svtHalf = sensor.isTopLayer() ? TOP_HALF : BOTTOM_HALF;
         	for (SvtDaqMapping object : this.getObjects()) {
+        		
+        		if(svtHalf.equals(object.getSvtHalf()) 
+        				&& object.getLayerNumber() == sensor.getLayerNumber()
+        				&& object.getSide().equals(sensor.getSide())) {
                 
-        		if (SvtHalf.equals(object.getSvtHalf()) && object.getLayerNumber() == layerNumber) {
-                
-        			// If the sensor belongs to the first three layers of the SVT
-        			// and the detector layer and SVT half match, no further searching
-        			// is required.
-        			if(layerNumber <= 6){
-                		return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
-                	} 
-                	
-        			// If the sensor belongs to layers 4-6, then find the matching
-        			// DAQ pair by looking at combinations of FEB hybrid ID's and module
-        			// numbers.  At the moment, it is assumed that odd SVT layers will 
-        			// be connected to even FEB hybrid channels and even SVT layers to odd
-        			// FEB hybrid channels.
-        			// TODO: Changes should be made to HpsSiSensor that will allow this
-        			//		 portion of the matching to be greatly simplified.
-                	if(SvtHalf.equals(TOP_HALF)){
-                		if(layerNumber%2 != 0 
-                				&& ((object.getFebHybridID() == 0 && moduleNumber == 0) 
-                						|| object.getFebHybridID() == 2 && moduleNumber == 2)){
-                			return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
-                		} else if(layerNumber %2 == 0 &&((object.getFebHybridID() == 1 && moduleNumber == 0)
-                				|| object.getFebHybridID() == 3 && moduleNumber == 2)) { 
-                			return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
-                		}
-                	} else if(SvtHalf.equals(BOTTOM_HALF)){ 
-                		if(layerNumber%2 != 0 
-                				&& ((object.getFebHybridID() == 0 && moduleNumber == 1) 
-                						|| object.getFebHybridID() == 2 && moduleNumber == 3)){
-                			return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
-                		} else if(layerNumber %2 == 0 &&((object.getFebHybridID() == 1 && moduleNumber == 1)
-                				|| object.getFebHybridID() == 3 && moduleNumber == 3)) { 
-                			return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
-                		}
-                	}
+        			return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
                 } 
             }
             return null;
@@ -125,12 +98,12 @@
                 buff.append("    ");
             	buff.append(object.getFebHybridID());
                 buff.append("    ");
-            	buff.append(object.getHybridID());
-                buff.append("    ");
                 buff.append(object.getSvtHalf());
                 buff.append("    ");
                 buff.append(String.format("%-2d", object.getLayerNumber()));
                 buff.append("    ");
+                buff.append(object.getSide());
+                buff.append("    ");
                 buff.append(object.getOrientation());
                 buff.append("    ");
                 buff.append('\n');
@@ -147,10 +120,6 @@
     	return getFieldValue("feb_hybrid_id");
     }
     
-    public int getHybridID() { 
-    	return getFieldValue("hybrid_id");
-    }
-    
     public String getSvtHalf() {
         return getFieldValue("svt_half");
     }
@@ -158,6 +127,10 @@
     public int getLayerNumber() {
         return getFieldValue("layer");
     }
+    
+    public String getSide(){
+    	return getFieldValue("side");
+    }
 
     public String getOrientation() { 
     	return getFieldValue("orientation");

java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/java/org/hps/conditions/svt
SvtDetectorSetup.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/java/org/hps/conditions/svt/SvtDetectorSetup.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/java/org/hps/conditions/svt/SvtDetectorSetup.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -39,24 +39,8 @@
             // Reset possible existing conditions data on sensor.
             sensor.reset();
 
-            // Get the layer number.  The layer number will range from 1-12;
-            int layerNumber = sensor.getLayerNumber();
-            
-            // Get the module ID number.  The sensors in the first three layers
-            // of the SVT are assigned a module ID = 0 if they are in the top 
-            // volume and 1 if they are on the bottom.  For layers 4-6, the 
-            // assigned module ID is 0 and 2 for top and 1 and 3 for bottom
-            // depending on whether the sensor is on the hole or slot side of
-            // the half-module.
-            int moduleNumber = sensor.getModuleNumber();
-
             // Get DAQ pair (FEB ID, FEB Hybrid ID) corresponding to this sensor
-            Pair<Integer, Integer> daqPair = null;
-            String SvtHalf = SvtDaqMappingCollection.TOP_HALF;
-            if (sensor.isBottomLayer()) {
-                SvtHalf = SvtDaqMappingCollection.BOTTOM_HALF;
-            }
-            daqPair = daqMap.getDaqPair(SvtHalf, layerNumber, moduleNumber);
+            Pair<Integer, Integer> daqPair = daqMap.getDaqPair(sensor);
             if (daqPair == null) {
                 throw new RuntimeException("Failed to find DAQ pair for sensor: " + sensor.getName());
             }

java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/resources/org/hps/conditions/config
conditions_dev.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/resources/org/hps/conditions/config/conditions_dev.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/main/resources/org/hps/conditions/config/conditions_dev.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -75,10 +75,10 @@
                 <collection class="org.hps.conditions.svt.SvtChannel$SvtChannelCollection"/>
             </classes>
             <fields>
-                <field name="channel_id" />
-                <field name="feb_id" />
+                <field name="channel_id"    />
+                <field name="feb_id"        />
                 <field name="feb_hybrid_id" />
-                <field name="channel" />
+                <field name="channel"       />
             </fields>
         </table>
         
@@ -88,8 +88,8 @@
                 <collection class="org.hps.conditions.svt.SvtConfiguration$SvtConfigurationCollection"/>                
             </classes>
             <fields>
-                <field name="filename"/>
-                <field name="content"/>
+                <field name="filename"  />
+                <field name="content"   />
             </fields>
         
         </table>
@@ -100,9 +100,9 @@
                 <collection class="org.hps.conditions.svt.SvtGain$SvtGainCollection"/>
             </classes>            
             <fields>
-                <field name="svt_channel_id" />
-                <field name="gain" />
-                <field name="offset" />
+                <field name="svt_channel_id"    />
+                <field name="gain"              />
+                <field name="offset"            />
             </fields>
         </table>
         
@@ -112,10 +112,10 @@
                 <collection class="org.hps.conditions.svt.SvtShapeFitParameters$SvtShapeFitParametersCollection"/>
             </classes>
             <fields>
-                <field name="svt_channel_id" />
-                <field name="amplitude" />
-                <field name="t0" />
-                <field name="tp" />
+                <field name="svt_channel_id"    />
+                <field name="amplitude"         />
+                <field name="t0"                />
+                <field name="tp"                />
             </fields>        
         </table>
         
@@ -125,19 +125,19 @@
                 <collection class="org.hps.conditions.svt.SvtCalibration$SvtCalibrationCollection"/>
             </classes>
             <fields>
-                <field name="svt_channel_id" />
-                <field name="pedestal_0" />
-                <field name="pedestal_1" />
-                <field name="pedestal_2" />
-                <field name="pedestal_3" />
-                <field name="pedestal_4" />
-                <field name="pedestal_5" />
-                <field name="noise_0" />
-                <field name="noise_1" />
-                <field name="noise_2" />
-                <field name="noise_3" />
-                <field name="noise_4" />
-                <field name="noise_5" />
+                <field name="svt_channel_id"    />
+                <field name="pedestal_0"        />
+                <field name="pedestal_1"        />
+                <field name="pedestal_2"        />
+                <field name="pedestal_3"        />
+                <field name="pedestal_4"        />
+                <field name="pedestal_5"        />
+                <field name="noise_0"           />
+                <field name="noise_1"           />
+                <field name="noise_2"           />
+                <field name="noise_3"           />
+                <field name="noise_4"           />
+                <field name="noise_5"           />
             </fields>        
         </table>
         
@@ -147,9 +147,9 @@
                 <collection class="org.hps.conditions.svt.SvtT0Shift$SvtT0ShiftCollection"/>
             </classes>
             <fields>
-                <field name="feb_id" />
+                <field name="feb_id"        />
                 <field name="feb_hybrid_id" />
-                <field name="t0_shift" />
+                <field name="t0_shift"      />
             </fields>        
         </table>
         
@@ -170,12 +170,12 @@
 				<collection class="org.hps.conditions.svt.SvtDaqMapping$SvtDaqMappingCollection"/>
             </classes>
             <fields>
-                <field name="feb_id" />
+                <field name="feb_id"        />
                 <field name="feb_hybrid_id" />
-                <field name="hybrid_id" />
-                <field name="svt_half" />
-                <field name="layer" />
-                <field name="orientation" />
+                <field name="svt_half"      />
+                <field name="layer"         />
+                <field name="side"          />
+                <field name="orientation"   />
             </fields>  
         </table>
 

java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions
ConditionsDriverTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/ConditionsDriverTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/ConditionsDriverTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -22,7 +22,7 @@
 public class ConditionsDriverTest extends TestCase {
 
     // This test file has a few events from each of the "good runs" of the 2012 Test Run.
-    private static final String fileLocation = "ftp://ftp-hps.slac.stanford.edu/hps/hps_data/hps_java_test_case_data/ConditionsTest.slcio";
+    private static final String fileLocation = "http://www.lcsim.org/test/hps-java/ConditionsTest.slcio";
 
     // Number of runs that should be processed in the job.
     static final int NRUNS = 9;

java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/beam
BeamCurrentTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/beam/BeamCurrentTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/beam/BeamCurrentTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -24,7 +24,7 @@
 public class BeamCurrentTest extends TestCase {
 
     /** This test file has a few events from the "good runs" of the Test Run. */
-    private static final String fileLocation = "ftp://ftp-hps.slac.stanford.edu/hps/hps_data/hps_java_test_case_data/ConditionsTest.slcio";
+    private static final String fileLocation = "http://www.lcsim.org/test/hps-java/ConditionsTest.slcio";
 
     /** Answer key for beam current by run. */
     static Map<Integer, Double> beamCurrentAnswerKey = new HashMap<Integer, Double>();

java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/ecal
PhysicalToGainTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/ecal/PhysicalToGainTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/conditions/src/test/java/org/hps/conditions/ecal/PhysicalToGainTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -25,11 +25,14 @@
  * This is a simple example of how to retrieve the gain and noise by physical ID (X,Y) in
  * the ECAL.
  * @author Jeremy McCormick <[log in to unmask]>
+ * 
+ * @version $Id$
+ * 
  */
 public class PhysicalToGainTest extends TestCase {
 
     // This test file has a few events from each of the "good runs" of the 2012 Test Run.
-    private static final String fileLocation = "ftp://ftp-hps.slac.stanford.edu/hps/hps_data/hps_java_test_case_data/ConditionsTest.slcio";
+    private static final String fileLocation = "http://www.lcsim.org/test/hps-java/ConditionsTest.slcio";
 
     // Run the test.
     public void test() throws Exception {

java/branches/hps_java_trunk_HPSJAVA-255/datacat
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/datacat/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/datacat/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -3,12 +3,12 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-datacat</artifactId>
     <name>datacat</name>
-    <description>HPS data catalog wrappers and utilities</description>
+    <description>data catalog wrappers and utilities</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
-        <version>3.0.2-SNAPSHOT</version>
+        <version>3.0.3-SNAPSHOT</version>
     </parent>    
     <repositories>
         <repository>
@@ -36,11 +36,6 @@
     <dependencies>
         <dependency>
             <groupId>org.lcsim</groupId>
-            <artifactId>lcsim-distribution</artifactId>
-            <version>3.0.4-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.lcsim</groupId>
             <artifactId>lcio</artifactId>
             <version>2.4.4-SNAPSHOT</version>
         </dependency>
@@ -124,4 +119,4 @@
             </plugin>                               
         </plugins>
     </build>
-</project>
\ No newline at end of file
+</project>

java/branches/hps_java_trunk_HPSJAVA-255/detector-data
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/detector-data/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/detector-data/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,24 +1,20 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-detector-data</artifactId>
     <name>detector-data</name>
-    <description>detector conditions data for the HPS experiment</description>
-    
+    <description>detector conditions data including text based conditions and compact XML files</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>
-    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/detector-data/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/detector-data/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/detector-data/</developerConnection>
     </scm>
-    
     <build>
         <resources>
             <resource>
@@ -31,5 +27,4 @@
             </resource>
         </resources>
     </build>
-    
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/distribution
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/distribution/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/distribution/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,31 +1,38 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-distribution</artifactId>
     <name>distribution</name>
-    <description>runnable jar distribution and JAS plugin configuration</description>
+    <description>module for creating a standalone runnable bin jar for HPS Java</description>
     <properties>
         <maven.javadoc.skip>true</maven.javadoc.skip>
     </properties>
-
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
-    </parent>
-    
+    </parent>    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/distribution/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/distribution/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/distribution/</developerConnection>
     </scm>
-
-    <build>       
+    <build>              
         <plugins>
+<!--    
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-site-plugin</artifactId>
+                <version>3.1</version>
+                <configuration>
+                    <skip>true</skip>
+                    <skipDeploy>true</skipDeploy>
+                </configuration>
+            </plugin>        
+-->            
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-shade-plugin</artifactId>
                 <executions>
                     <execution>
@@ -63,9 +70,8 @@
             </plugin>
         </plugins>
     </build>
-
     <dependencies>
-        <!-- This pulls in analysis et al transitively. -->
+        <!-- This next dep pulls in analysis et al. transitively. -->
         <dependency>
             <groupId>org.hps</groupId>
             <artifactId>hps-users</artifactId>
@@ -79,5 +85,4 @@
             <artifactId>hps-steering-files</artifactId>
         </dependency>
     </dependencies>
-
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,24 +1,20 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-ecal-readout-sim</artifactId>
     <name>ecal-readout-sim</name>
-    <description>HPS ECAL readout simulation</description>
-    
+    <description>MC readout simulation</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>
-    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-readout-sim/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/ecal-readout-sim/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/ecal-readout-sim/</developerConnection>
     </scm>
-    
     <dependencies>
         <dependency>
             <groupId>org.hps</groupId>
@@ -29,5 +25,4 @@
             <artifactId>hps-conditions</artifactId>
         </dependency>
     </dependencies>
-    
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/src/main/java/org/hps/readout/ecal
FADCPrimaryTriggerDriver.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -28,45 +28,45 @@
     // ==================================================================
     // ==== Trigger Cut Default Parameters ==============================
     // ==================================================================
-    private int minHitCount = 1;								// Minimum required cluster hit count threshold. (Hits)			
-    private double seedEnergyHigh = Double.MAX_VALUE;			// Maximum allowed cluster seed energy. (GeV)
-    private double seedEnergyLow = Double.MIN_VALUE;			// Minimum required cluster seed energy. (GeV)
-    private double clusterEnergyHigh = 1.5 * ECalUtils.GeV;		// Maximum allowed cluster total energy. (GeV)
-    private double clusterEnergyLow = .1 * ECalUtils.GeV;		// Minimum required cluster total energy. (GeV)
-    private double energySumHigh = 1.9 * ECalUtils.GeV;			// Maximum allowed pair energy sum. (GeV)
-    private double energySumLow = 0.0 * ECalUtils.GeV;			// Minimum required pair energy sum. (GeV)
-    private double energyDifferenceHigh = 2.2 * ECalUtils.GeV;	// Maximum allowed pair energy difference. (GeV)
-    private double energySlopeLow = 1.1;						// Minimum required pair energy slope value.
-    private double coplanarityHigh = 35;						// Maximum allowed pair coplanarity deviation. (Degrees)
+    private int minHitCount = 1;                                   // Minimum required cluster hit count threshold. (Hits)            
+    private double seedEnergyHigh = Double.MAX_VALUE;              // Maximum allowed cluster seed energy. (GeV)
+    private double seedEnergyLow = Double.MIN_VALUE;               // Minimum required cluster seed energy. (GeV)
+    private double clusterEnergyHigh = 1.5 * ECalUtils.GeV;        // Maximum allowed cluster total energy. (GeV)
+    private double clusterEnergyLow = .1 * ECalUtils.GeV;          // Minimum required cluster total energy. (GeV)
+    private double energySumHigh = 1.9 * ECalUtils.GeV;            // Maximum allowed pair energy sum. (GeV)
+    private double energySumLow = 0.0 * ECalUtils.GeV;             // Minimum required pair energy sum. (GeV)
+    private double energyDifferenceHigh = 2.2 * ECalUtils.GeV;     // Maximum allowed pair energy difference. (GeV)
+    private double energySlopeLow = 1.1;                           // Minimum required pair energy slope value.
+    private double coplanarityHigh = 35;                           // Maximum allowed pair coplanarity deviation. (Degrees)
     
     // ==================================================================
     // ==== Trigger General Default Parameters ==========================
     // ==================================================================
-    private String clusterCollectionName = "EcalClusters";		// Name for the LCIO cluster collection.
-    private int pairCoincidence = 2;							// Maximum allowed time difference between clusters. (4 ns clock-cycles)
-    private double energySlopeParamF = 0.005500;				// A parameter value used for the energy slope calculation.
-    private double originX = 1393.0 * Math.tan(0.03052);		// ECal mid-plane, defined by photon beam position (30.52 mrad) at ECal face (z=1393 mm)
-    private int backgroundLevel = -1;							// Automatically sets the cuts to achieve a predetermined background rate.
+    private String clusterCollectionName = "EcalClusters";        // Name for the LCIO cluster collection.
+    private int pairCoincidence = 2;                              // Maximum allowed time difference between clusters. (4 ns clock-cycles)
+    private double energySlopeParamF = 0.005500;                  // A parameter value used for the energy slope calculation.
+    private double originX = 1393.0 * Math.tan(0.03052);          // ECal mid-plane, defined by photon beam position (30.52 mrad) at ECal face (z=1393 mm)
+    private int backgroundLevel = -1;                            // Automatically sets the cuts to achieve a predetermined background rate.
     
     // ==================================================================
     // ==== Driver Internal Variables ===================================
     // ==================================================================
-    private Queue<List<HPSEcalCluster>> topClusterQueue = null;	// Store clusters on the top half of the calorimeter.
-    private Queue<List<HPSEcalCluster>> botClusterQueue = null;	// Store clusters on the bottom half of the calorimeter.
-    private int allClusters = 0;								// Track the number of clusters processed.
-    private int allPairs = 0;									// Track the number of cluster pairs processed.
-    private int clusterTotalEnergyCount = 0;					// Track the clusters which pass the total energy cut.
-    private int clusterSeedEnergyCount = 0;						// Track the clusters which pass the seed energy cut.
-    private int clusterHitCountCount = 0;						// Track the clusters which pass the hit count cut.
-    private int pairEnergySumCount = 0;							// Track the pairs which pass the energy sum cut.
-    private int pairEnergyDifferenceCount = 0;					// Track the pairs which pass the energy difference cut.
-    private int pairEnergySlopeCount = 0;						// Track the pairs which pass the energy slope cut.
-    private int pairCoplanarityCount = 0;						// Track the pairs which pass the coplanarity cut.
+    private Queue<List<HPSEcalCluster>> topClusterQueue = null;    // Store clusters on the top half of the calorimeter.
+    private Queue<List<HPSEcalCluster>> botClusterQueue = null;    // Store clusters on the bottom half of the calorimeter.
+    private int allClusters = 0;                                   // Track the number of clusters processed.
+    private int allPairs = 0;                                      // Track the number of cluster pairs processed.
+    private int clusterTotalEnergyCount = 0;                       // Track the clusters which pass the total energy cut.
+    private int clusterSeedEnergyCount = 0;                        // Track the clusters which pass the seed energy cut.
+    private int clusterHitCountCount = 0;                          // Track the clusters which pass the hit count cut.
+    private int pairEnergySumCount = 0;                            // Track the pairs which pass the energy sum cut.
+    private int pairEnergyDifferenceCount = 0;                     // Track the pairs which pass the energy difference cut.
+    private int pairEnergySlopeCount = 0;                          // Track the pairs which pass the energy slope cut.
+    private int pairCoplanarityCount = 0;                          // Track the pairs which pass the coplanarity cut.
     
     // ==================================================================
     // ==== Trigger Distribution Plots ==================================
     // ==================================================================
-	private AIDA aida = AIDA.defaultInstance();
+    private AIDA aida = AIDA.defaultInstance();
     IHistogram1D clusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution", 176, 0.0, 2.2);
     IHistogram1D clusterSeedEnergy100 = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Over 100 MeV)", 176, 0.0, 2.2);
     IHistogram1D clusterSeedEnergySingle = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
@@ -89,34 +89,36 @@
     IHistogram1D pairEnergySlope = aida.histogram1D("Trigger Plots :: Pair Energy Slope Distribution", 400, 0.0, 4.0);
     IHistogram1D pairEnergySlopeAll = aida.histogram1D("Trigger Plots :: Pair Energy Slope Distribution (Passed All Cuts)", 400, 0.0, 4.0);
     
-	IHistogram2D clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 44, -22.0, 22.0, 10, -5, 5);
-	IHistogram2D clusterDistribution100 = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Over 100 MeV)", 44, -23, 23, 11, -5.5, 5.5);
-	IHistogram2D clusterDistributionSingle = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 44, -23, 23, 11, -5.5, 5.5);
-	IHistogram2D clusterDistributionAll = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+    IHistogram2D clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 46, -23, 23, 11, -5.5, 5.5);
+    IHistogram2D clusterDistribution100 = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Over 100 MeV)", 46, -23, 23, 11, -5.5, 5.5);
+    IHistogram2D clusterDistributionSingle = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 46, -23, 23, 11, -5.5, 5.5);
+    IHistogram2D clusterDistributionAll = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 46, -23, 23, 11, -5.5, 5.5);
     
+    IHistogram1D hotCrystalEnergy = aida.histogram1D("Trigger Plots :: Hot Crystal Energy Distribution", 176, 0.0, 2.2);
+    
     /**
      * Prints out the results of the trigger at the end of the run.
      */
     @Override
     public void endOfData() {
-    	// Print out the results of the trigger cuts.
-    	System.out.printf("Trigger Processing Results%n");
-    	System.out.printf("\tSingle-Cluster Cuts%n");
-    	System.out.printf("\t\tTotal Clusters Processed     :: %d%n", allClusters);
-    	System.out.printf("\t\tPassed Seed Energy Cut       :: %d%n", clusterSeedEnergyCount);
-    	System.out.printf("\t\tPassed Hit Count Cut         :: %d%n", clusterHitCountCount);
-    	System.out.printf("\t\tPassed Total Energy Cut      :: %d%n", clusterTotalEnergyCount);
-    	System.out.printf("%n");
-    	System.out.printf("\tCluster Pair Cuts%n");
-    	System.out.printf("\t\tTotal Pairs Processed        :: %d%n", allPairs);
-    	System.out.printf("\t\tPassed Energy Sum Cut        :: %d%n", pairEnergySumCount);
-    	System.out.printf("\t\tPassed Energy Difference Cut :: %d%n", pairEnergyDifferenceCount);
-    	System.out.printf("\t\tPassed Energy Slope Cut      :: %d%n", pairEnergySlopeCount);
-    	System.out.printf("\t\tPassed Coplanarity Cut       :: %d%n", pairCoplanarityCount);
-    	System.out.printf("%n");
-    	System.out.printf("\tTrigger Count :: %d%n", numTriggers);
-    	
-    	// Run the superclass method.
+        // Print out the results of the trigger cuts.
+        System.out.printf("Trigger Processing Results%n");
+        System.out.printf("\tSingle-Cluster Cuts%n");
+        System.out.printf("\t\tTotal Clusters Processed     :: %d%n", allClusters);
+        System.out.printf("\t\tPassed Seed Energy Cut       :: %d%n", clusterSeedEnergyCount);
+        System.out.printf("\t\tPassed Hit Count Cut         :: %d%n", clusterHitCountCount);
+        System.out.printf("\t\tPassed Total Energy Cut      :: %d%n", clusterTotalEnergyCount);
+        System.out.printf("%n");
+        System.out.printf("\tCluster Pair Cuts%n");
+        System.out.printf("\t\tTotal Pairs Processed        :: %d%n", allPairs);
+        System.out.printf("\t\tPassed Energy Sum Cut        :: %d%n", pairEnergySumCount);
+        System.out.printf("\t\tPassed Energy Difference Cut :: %d%n", pairEnergyDifferenceCount);
+        System.out.printf("\t\tPassed Energy Slope Cut      :: %d%n", pairEnergySlopeCount);
+        System.out.printf("\t\tPassed Coplanarity Cut       :: %d%n", pairCoplanarityCount);
+        System.out.printf("%n");
+        System.out.printf("\tTrigger Count :: %d%n", numTriggers);
+        
+        // Run the superclass method.
         super.endOfData();
     }
     
@@ -126,81 +128,89 @@
      */
     @Override
     public void process(EventHeader event) {
-    	// Process the list of clusters for the event, if it exists.
+        // Process the list of clusters for the event, if it exists.
         if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
-        	// Get the collection of clusters.
-        	List<HPSEcalCluster> clusterList = event.get(HPSEcalCluster.class, clusterCollectionName);
-        	
-        	// Create a list to hold clusters which pass the single
-        	// cluster cuts.
-        	List<HPSEcalCluster> goodClusterList = new ArrayList<HPSEcalCluster>(clusterList.size());
-        	
-        	// Sort through the cluster list and add clusters that pass
-        	// the single cluster cuts to the good list.
-        	clusterLoop:
-        	for(HPSEcalCluster cluster : clusterList) {
-        		// Increment the number of processed clusters.
-        		allClusters++;
-        		
-        		// Get the cluster plot values.
-        		int hitCount = cluster.getCalorimeterHits().size();
-        		double seedEnergy = cluster.getSeedHit().getCorrectedEnergy();
-        		double clusterEnergy = cluster.getEnergy();
-        		int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
-        		int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
-        		if(ix > 0) { ix = ix - 1; }
-        		
-        		// Fill the general plots.
-        		clusterSeedEnergy.fill(seedEnergy, 1);
-        		clusterTotalEnergy.fill(clusterEnergy, 1);
-        		clusterHitCount.fill(hitCount, 1);
-        		clusterDistribution.fill(ix, iy, 1);
-        		
-        		// Fill the "over 100 MeV" plots if applicable.
-        		if(seedEnergy >= 0.100) {
-            		clusterSeedEnergy100.fill(seedEnergy, 1);
-            		clusterTotalEnergy100.fill(clusterEnergy, 1);
-            		clusterHitCount100.fill(hitCount, 1);
-            		clusterDistribution100.fill(ix, iy, 1);
-        		}
-        		
-        		// ==== Seed Hit Energy Cut ====================================
-        		// =============================================================
-        		// If the cluster fails the cut, skip to the next cluster.
-        		if(!clusterSeedEnergyCut(cluster)) { continue clusterLoop; }
-        		
-        		// Otherwise, note that it passed the cut.
-        		clusterSeedEnergyCount++;
-        		
-        		// ==== Cluster Hit Count Cut ==================================
-        		// =============================================================
-        		// If the cluster fails the cut, skip to the next cluster.
-        		if(!clusterHitCountCut(cluster)) { continue clusterLoop; }
-        		
-        		// Otherwise, note that it passed the cut.
-        		clusterHitCountCount++;
-        		
-        		// ==== Cluster Total Energy Cut ===============================
-        		// =============================================================
-        		// If the cluster fails the cut, skip to the next cluster.
-        		if(!clusterTotalEnergyCut(cluster)) { continue clusterLoop; }
-        		
-        		// Otherwise, note that it passed the cut.
-        		clusterTotalEnergyCount++;
-        		
-        		// Fill the "passed single cuts" plots.
-        		clusterSeedEnergySingle.fill(seedEnergy, 1);
-        		clusterTotalEnergySingle.fill(clusterEnergy, 1);
-        		clusterHitCountSingle.fill(hitCount, 1);
-        		clusterDistributionSingle.fill(ix, iy, 1);
-        		
-        		// A cluster that passes all of the single-cluster cuts
-        		// can be used in cluster pairs.
-        		goodClusterList.add(cluster);
-        	}
-        	
-        	// Put the good clusters into the cluster queue.
-        	updateClusterQueues(goodClusterList);
+            // Get the collection of clusters.
+            List<HPSEcalCluster> clusterList = event.get(HPSEcalCluster.class, clusterCollectionName);
+            
+            // Create a list to hold clusters which pass the single
+            // cluster cuts.
+            List<HPSEcalCluster> goodClusterList = new ArrayList<HPSEcalCluster>(clusterList.size());
+            
+            // Sort through the cluster list and add clusters that pass
+            // the single cluster cuts to the good list.
+            clusterLoop:
+            for(HPSEcalCluster cluster : clusterList) {
+                // Increment the number of processed clusters.
+                allClusters++;
+                
+                // Get the cluster plot values.
+                int hitCount = cluster.getCalorimeterHits().size();
+                double seedEnergy = cluster.getSeedHit().getCorrectedEnergy();
+                double clusterEnergy = cluster.getEnergy();
+                int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
+                int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
+                
+                // If the cluster is in the "hot" region, write out its
+                // energy to a special plot.
+                if((iy == 1 || iy == -1) && (ix == -1 || ix == 1 || ix == 2)) {
+                    hotCrystalEnergy.fill(clusterEnergy, 1);
+                }
+                
+                // Correct for "hole" on the x-axis for plotting.
+                if(ix > 0) { ix = ix - 1; }
+                
+                // Fill the general plots.
+                clusterSeedEnergy.fill(seedEnergy, 1);
+                clusterTotalEnergy.fill(clusterEnergy, 1);
+                clusterHitCount.fill(hitCount, 1);
+                clusterDistribution.fill(ix, iy, 1);
+                
+                // Fill the "over 100 MeV" plots if applicable.
+                if(seedEnergy >= 0.100) {
+                    clusterSeedEnergy100.fill(seedEnergy, 1);
+                    clusterTotalEnergy100.fill(clusterEnergy, 1);
+                    clusterHitCount100.fill(hitCount, 1);
+                    clusterDistribution100.fill(ix, iy, 1);
+                }
+                
+                // ==== Seed Hit Energy Cut ====================================
+                // =============================================================
+                // If the cluster fails the cut, skip to the next cluster.
+                if(!clusterSeedEnergyCut(cluster)) { continue clusterLoop; }
+                
+                // Otherwise, note that it passed the cut.
+                clusterSeedEnergyCount++;
+                
+                // ==== Cluster Hit Count Cut ==================================
+                // =============================================================
+                // If the cluster fails the cut, skip to the next cluster.
+                if(!clusterHitCountCut(cluster)) { continue clusterLoop; }
+                
+                // Otherwise, note that it passed the cut.
+                clusterHitCountCount++;
+                
+                // ==== Cluster Total Energy Cut ===============================
+                // =============================================================
+                // If the cluster fails the cut, skip to the next cluster.
+                if(!clusterTotalEnergyCut(cluster)) { continue clusterLoop; }
+                
+                // Otherwise, note that it passed the cut.
+                clusterTotalEnergyCount++;
+                
+                // Fill the "passed single cuts" plots.
+                clusterSeedEnergySingle.fill(seedEnergy, 1);
+                clusterTotalEnergySingle.fill(clusterEnergy, 1);
+                clusterHitCountSingle.fill(hitCount, 1);
+                clusterDistributionSingle.fill(ix, iy, 1);
+                
+                // A cluster that passes all of the single-cluster cuts
+                // can be used in cluster pairs.
+                goodClusterList.add(cluster);
+            }
+            
+            // Put the good clusters into the cluster queue.
+            updateClusterQueues(goodClusterList);
         }
         
         // Perform the superclass event processing.
@@ -214,7 +224,7 @@
      * be set. Actual background rates equal about (5 * backgroundLevel) kHz.
      */
     public void setBackgroundLevel(int backgroundLevel) {
-    	this.backgroundLevel = backgroundLevel;
+        this.backgroundLevel = backgroundLevel;
     }
     
     /**
@@ -277,7 +287,7 @@
      * @param energySlopeLow - The parameter value.
      */
     public void setEnergySlopeLow(double energySlopeLow) {
-    	this.energySlopeLow = energySlopeLow;
+        this.energySlopeLow = energySlopeLow;
     }
     
     /**
@@ -361,12 +371,12 @@
      */
     @Override
     public void startOfData() {
-    	// Make sure that a valid cluster collection name has been
-    	// defined. If it has not, throw an exception.
+        // Make sure that a valid cluster collection name has been
+        // defined. If it has not, throw an exception.
         if (clusterCollectionName == null) {
             throw new RuntimeException("The parameter clusterCollectionName was not set!");
         }
-    	
+        
         // Initialize the top and bottom cluster queues.
         topClusterQueue = new LinkedList<List<HPSEcalCluster>>();
         botClusterQueue = new LinkedList<List<HPSEcalCluster>>();
@@ -407,9 +417,9 @@
         for (HPSEcalCluster botCluster : botClusterQueue.element()) {
             for (List<HPSEcalCluster> topClusters : topClusterQueue) {
                 for (HPSEcalCluster topCluster : topClusters) {
-                	// The first cluster in a pair should always be
-                	// the higher energy cluster. If the top cluster
-                	// is higher energy, it goes first.
+                    // The first cluster in a pair should always be
+                    // the higher energy cluster. If the top cluster
+                    // is higher energy, it goes first.
                     if (topCluster.getEnergy() > botCluster.getEnergy()) {
                         HPSEcalCluster[] clusterPair = {topCluster, botCluster};
                         clusterPairs.add(clusterPair);
@@ -428,24 +438,24 @@
         return clusterPairs;
     }
     
-	/**
-	 * Determines if the event produces a trigger.
-	 * 
-	 * @return Returns <code>true</code> if the event produces a trigger
-	 * and <code>false</code> if it does not.
-	 */
-	@Override
-	protected boolean triggerDecision(EventHeader event) {
-    	// If there is a list of clusters present for this event,
-    	// check whether it passes the trigger conditions.
-    	if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
-        	return testTrigger();
+    /**
+     * Determines if the event produces a trigger.
+     * 
+     * @return Returns <code>true</code> if the event produces a trigger
+     * and <code>false</code> if it does not.
+     */
+    @Override
+    protected boolean triggerDecision(EventHeader event) {
+        // If there is a list of clusters present for this event,
+        // check whether it passes the trigger conditions.
+        if (event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
+            return testTrigger();
         }
         
         // Otherwise, this event can not produce a trigger and should
         // return false automatically.
         else { return false; }
-	}
+    }
     
     /**
      * Checks whether the argument cluster possesses the minimum
@@ -456,7 +466,7 @@
      * and <code>false</code> if the cluster does not.
      */
     private boolean clusterHitCountCut(HPSEcalCluster cluster) {
-    	return (getValueClusterHitCount(cluster) >= minHitCount);
+        return (getValueClusterHitCount(cluster) >= minHitCount);
     }
     
     /**
@@ -468,12 +478,12 @@
      * and <code>false</code> if the cluster does not.
      */
     private boolean clusterSeedEnergyCut(HPSEcalCluster cluster) {
-    	// Get the cluster seed energy.
-    	double energy = getValueClusterSeedEnergy(cluster);
-    	
-    	// Check that it is above the minimum threshold and below the
-    	// maximum threshold.
-    	return (energy < seedEnergyHigh) && (energy > seedEnergyLow);
+        // Get the cluster seed energy.
+        double energy = getValueClusterSeedEnergy(cluster);
+        
+        // Check that it is above the minimum threshold and below the
+        // maximum threshold.
+        return (energy < seedEnergyHigh) && (energy > seedEnergyLow);
     }
     
     /**
@@ -485,12 +495,12 @@
      * and <code>false</code> if the cluster does not.
      */
     private boolean clusterTotalEnergyCut(HPSEcalCluster cluster) {
-    	// Get the total cluster energy.
-    	double energy = getValueClusterTotalEnergy(cluster);
-    	
-    	// Check that it is above the minimum threshold and below the
-    	// maximum threshold.
-    	return (energy < clusterEnergyHigh) && (energy > clusterEnergyLow);
+        // Get the total cluster energy.
+        double energy = getValueClusterTotalEnergy(cluster);
+        
+        // Check that it is above the minimum threshold and below the
+        // maximum threshold.
+        return (energy < clusterEnergyHigh) && (energy > clusterEnergyLow);
     }
     
     /**
@@ -512,7 +522,7 @@
      * @return Returns the cut value.
      */
     private double getValueClusterTotalEnergy(HPSEcalCluster cluster) {
-    	return cluster.getEnergy();
+        return cluster.getEnergy();
     }
     
     /**
@@ -523,7 +533,7 @@
      * @return Returns the cut value.
      */
     private int getValueClusterHitCount(HPSEcalCluster cluster) {
-    	return cluster.getCalorimeterHits().size();
+        return cluster.getCalorimeterHits().size();
     }
     
     /**
@@ -534,7 +544,7 @@
      * @return Returns the cut value.
      */
     private double getValueClusterSeedEnergy(HPSEcalCluster cluster) {
-    	return cluster.getSeedHit().getCorrectedEnergy();
+        return cluster.getSeedHit().getCorrectedEnergy();
     }
     
     /**
@@ -545,14 +555,14 @@
      * @return Returns the cut value.
      */
     private double getValueCoplanarity(HPSEcalCluster[] clusterPair) {
-    	// Get the cluster angles.
-    	double[] clusterAngle = new double[2];
-    	for(int i = 0; i < 2; i++) {
+        // Get the cluster angles.
+        double[] clusterAngle = new double[2];
+        for(int i = 0; i < 2; i++) {
             double position[] = clusterPair[i].getSeedHit().getPosition();
             clusterAngle[i] = (Math.toDegrees(Math.atan2(position[1], position[0] - originX)) + 180.0) % 180.0;
-    	}
-    	
-    	// Calculate the coplanarity cut value.
+        }
+        
+        // Calculate the coplanarity cut value.
         return Math.abs(clusterAngle[1] - clusterAngle[0]);
     }
     
@@ -564,7 +574,7 @@
      * @return Returns the cut value.
      */
     private double getValueEnergyDifference(HPSEcalCluster[] clusterPair) {
-    	return clusterPair[0].getEnergy() - clusterPair[1].getEnergy();
+        return clusterPair[0].getEnergy() - clusterPair[1].getEnergy();
     }
     
     /**
@@ -575,15 +585,15 @@
      * @return Returns the cut value.
      */
     private double getValueEnergySlope(HPSEcalCluster[] clusterPair) {
-    	// E + R*F
-    	// Get the low energy cluster energy.
-    	double slopeParamE = clusterPair[1].getEnergy();
-    	
-    	// Get the low energy cluster radial distance.
-    	double slopeParamR = getClusterDistance(clusterPair[1]);
-    	
-    	// Calculate the energy slope.
-    	return slopeParamE + slopeParamR * energySlopeParamF;
+        // E + R*F
+        // Get the low energy cluster energy.
+        double slopeParamE = clusterPair[1].getEnergy();
+        
+        // Get the low energy cluster radial distance.
+        double slopeParamR = getClusterDistance(clusterPair[1]);
+        
+        // Calculate the energy slope.
+        return slopeParamE + slopeParamR * energySlopeParamF;
     }
     
     /**
@@ -594,7 +604,7 @@
      * @return Returns the cut value.
      */
     private double getValueEnergySum(HPSEcalCluster[] clusterPair) {
-    	return clusterPair[0].getEnergy() + clusterPair[1].getEnergy();
+        return clusterPair[0].getEnergy() + clusterPair[1].getEnergy();
     }
     
     /**
@@ -630,7 +640,7 @@
      * @return true if pair is found, false otherwise
      */
     private boolean pairEnergySlopeCut(HPSEcalCluster[] clusterPair) {
-    	return (getValueEnergySlope(clusterPair) > energySlopeLow);
+        return (getValueEnergySlope(clusterPair) > energySlopeLow);
     }
     
     /**
@@ -642,216 +652,216 @@
      * the cut and <code>false</code> if it does not.
      */
     private boolean pairEnergySumCut(HPSEcalCluster[] clusterPair) {
-    	// Get the energy sum value.
-    	double energySum = getValueEnergySum(clusterPair);
-    	
-    	// Check that it is within the allowed range.
+        // Get the energy sum value.
+        double energySum = getValueEnergySum(clusterPair);
+        
+        // Check that it is within the allowed range.
         return (energySum < energySumHigh) && (energySum > energySumLow);
     }
-	
+    
     private void setBackgroundCuts(int backgroundLevel) {
-    	// Make sure that the background level is valid.
-    	if(backgroundLevel < 1 || backgroundLevel > 10) {
-    		throw new RuntimeException(String.format("Trigger cuts are undefined for background level %d.", backgroundLevel));
-    	}
-    	
-    	// Otherwise, set the trigger cuts. Certain cuts are constant
-    	// across all background levels.
-    	clusterEnergyLow = 0.000;
-    	seedEnergyLow = 0.100;
-    	
-    	// Set the variable values.
-    	if(backgroundLevel == 1) {
-    		clusterEnergyHigh = 1.700;
-    		seedEnergyHigh = 1.300;
-    		energySumLow = 0.400;
-    		energySumHigh = 2.00;
-    		energyDifferenceHigh = 1.500;
-    		energySlopeLow = 1.0;
-    		coplanarityHigh = 40;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 2) {
-    		clusterEnergyHigh = 1.600;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.300;
-    		energySumHigh = 2.00;
-    		energyDifferenceHigh = 1.400;
-    		energySlopeLow = 0.8;
-    		coplanarityHigh = 40;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 3) {
-    		clusterEnergyHigh = 1.600;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.200;
-    		energySumHigh = 2.000;
-    		energyDifferenceHigh = 1.400;
-    		energySlopeLow = 0.7;
-    		coplanarityHigh = 40;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 4) {
-    		clusterEnergyHigh = 1.500;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.500;
-    		energySumHigh = 1.950;
-    		energyDifferenceHigh = 1.400;
-    		energySlopeLow = 0.6;
-    		coplanarityHigh = 40;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 5) {
-    		clusterEnergyHigh = 1.500;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.400;
-    		energySumHigh = 2.000;
-    		energyDifferenceHigh = 1.400;
-    		energySlopeLow = 0.6;
-    		coplanarityHigh = 45;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 6) {
-    		clusterEnergyHigh = 1.500;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.200;
-    		energySumHigh = 1.950;
-    		energyDifferenceHigh = 1.400;
-    		energySlopeLow = 0.6;
-    		coplanarityHigh = 55;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 7) {
-    		clusterEnergyHigh = 1.700;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.200;
-    		energySumHigh = 2.000;
-    		energyDifferenceHigh = 1.500;
-    		energySlopeLow = 0.6;
-    		coplanarityHigh = 60;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 8) {
-    		clusterEnergyHigh = 1.700;
-    		seedEnergyHigh = 1.300;
-    		energySumLow = 0.200;
-    		energySumHigh = 2.000;
-    		energyDifferenceHigh = 1.500;
-    		energySlopeLow = 0.6;
-    		coplanarityHigh = 65;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 9) {
-    		clusterEnergyHigh = 1.500;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.400;
-    		energySumHigh = 1.950;
-    		energyDifferenceHigh = 1.400;
-    		energySlopeLow = 0.5;
-    		coplanarityHigh = 60;
-    		minHitCount = 2;
-    	} else if(backgroundLevel == 10) {
-    		clusterEnergyHigh = 1.500;
-    		seedEnergyHigh = 1.200;
-    		energySumLow = 0.400;
-    		energySumHigh = 2.000;
-    		energyDifferenceHigh = 1.400;
-    		energySlopeLow = 0.5;
-    		coplanarityHigh = 65;
-    		minHitCount = 2;
-    	}
+        // Make sure that the background level is valid.
+        if(backgroundLevel < 1 || backgroundLevel > 10) {
+            throw new RuntimeException(String.format("Trigger cuts are undefined for background level %d.", backgroundLevel));
+        }
+        
+        // Otherwise, set the trigger cuts. Certain cuts are constant
+        // across all background levels.
+        clusterEnergyLow = 0.000;
+        seedEnergyLow = 0.100;
+        
+        // Set the variable values.
+        if(backgroundLevel == 1) {
+            clusterEnergyHigh = 1.700;
+            seedEnergyHigh = 1.300;
+            energySumLow = 0.400;
+            energySumHigh = 2.00;
+            energyDifferenceHigh = 1.500;
+            energySlopeLow = 1.0;
+            coplanarityHigh = 40;
+            minHitCount = 2;
+        } else if(backgroundLevel == 2) {
+            clusterEnergyHigh = 1.600;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.300;
+            energySumHigh = 2.00;
+            energyDifferenceHigh = 1.400;
+            energySlopeLow = 0.8;
+            coplanarityHigh = 40;
+            minHitCount = 2;
+        } else if(backgroundLevel == 3) {
+            clusterEnergyHigh = 1.600;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.200;
+            energySumHigh = 2.000;
+            energyDifferenceHigh = 1.400;
+            energySlopeLow = 0.7;
+            coplanarityHigh = 40;
+            minHitCount = 2;
+        } else if(backgroundLevel == 4) {
+            clusterEnergyHigh = 1.500;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.500;
+            energySumHigh = 1.950;
+            energyDifferenceHigh = 1.400;
+            energySlopeLow = 0.6;
+            coplanarityHigh = 40;
+            minHitCount = 2;
+        } else if(backgroundLevel == 5) {
+            clusterEnergyHigh = 1.500;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.400;
+            energySumHigh = 2.000;
+            energyDifferenceHigh = 1.400;
+            energySlopeLow = 0.6;
+            coplanarityHigh = 45;
+            minHitCount = 2;
+        } else if(backgroundLevel == 6) {
+            clusterEnergyHigh = 1.500;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.200;
+            energySumHigh = 1.950;
+            energyDifferenceHigh = 1.400;
+            energySlopeLow = 0.6;
+            coplanarityHigh = 55;
+            minHitCount = 2;
+        } else if(backgroundLevel == 7) {
+            clusterEnergyHigh = 1.700;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.200;
+            energySumHigh = 2.000;
+            energyDifferenceHigh = 1.500;
+            energySlopeLow = 0.6;
+            coplanarityHigh = 60;
+            minHitCount = 2;
+        } else if(backgroundLevel == 8) {
+            clusterEnergyHigh = 1.700;
+            seedEnergyHigh = 1.300;
+            energySumLow = 0.200;
+            energySumHigh = 2.000;
+            energyDifferenceHigh = 1.500;
+            energySlopeLow = 0.6;
+            coplanarityHigh = 65;
+            minHitCount = 2;
+        } else if(backgroundLevel == 9) {
+            clusterEnergyHigh = 1.500;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.400;
+            energySumHigh = 1.950;
+            energyDifferenceHigh = 1.400;
+            energySlopeLow = 0.5;
+            coplanarityHigh = 60;
+            minHitCount = 2;
+        } else if(backgroundLevel == 10) {
+            clusterEnergyHigh = 1.500;
+            seedEnergyHigh = 1.200;
+            energySumLow = 0.400;
+            energySumHigh = 2.000;
+            energyDifferenceHigh = 1.400;
+            energySlopeLow = 0.5;
+            coplanarityHigh = 65;
+            minHitCount = 2;
+        }
     }
     
-	/**
-	 * Tests all of the current cluster pairs for triggers.
-	 * 
-	 * @return Returns <code>true</code> if one of the cluster pairs
-	 * passes all of the cluster cuts and <code>false</code> otherwise.
-	 */
+    /**
+     * Tests all of the current cluster pairs for triggers.
+     * 
+     * @return Returns <code>true</code> if one of the cluster pairs
+     * passes all of the cluster cuts and <code>false</code> otherwise.
+     */
     private boolean testTrigger() {
-    	// Get the list of cluster pairs.
-    	List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();
+        // Get the list of cluster pairs.
+        List<HPSEcalCluster[]> clusterPairs = getClusterPairsTopBot();
         
         // Iterate over the cluster pairs and perform each of the cluster
         // pair cuts on them. A cluster pair that passes all of the
         // cuts registers as a trigger.
-    	pairLoop:
+        pairLoop:
         for (HPSEcalCluster[] clusterPair : clusterPairs) {
-    		// Increment the number of processed cluster pairs.
-    		allPairs++;
-    		
-    		// Get the plot values for the pair cuts.
-    		double energySum = getValueEnergySum(clusterPair);
-    		double energyDifference = getValueEnergyDifference(clusterPair);
-    		double energySlope = getValueEnergySlope(clusterPair);
-    		double coplanarity = getValueCoplanarity(clusterPair);
-    		
-    		// Fill the general plots.
-    		pairEnergySum.fill(energySum, 1);
-    		pairEnergyDifference.fill(energyDifference, 1);
-    		pairEnergySlope.fill(energySlope, 1);
-    		pairCoplanarity.fill(coplanarity, 1);
-    		
-    		// ==== Pair Energy Sum Cut ====================================
-    		// =============================================================
-    		// If the cluster fails the cut, skip to the next pair.
-    		if(!pairEnergySumCut(clusterPair)) { continue pairLoop; }
-    		
-    		// Otherwise, note that it passed the cut.
-    		pairEnergySumCount++;
-        	
-    		// ==== Pair Energy Difference Cut =============================
-    		// =============================================================
-    		// If the cluster fails the cut, skip to the next pair.
-    		if(!pairEnergyDifferenceCut(clusterPair)) { continue pairLoop; }
-    		
-    		// Otherwise, note that it passed the cut.
-    		pairEnergyDifferenceCount++;
-    		
-    		// ==== Pair Energy Slope Cut ==================================
-    		// =============================================================
-    		// If the cluster fails the cut, skip to the next pair.
-    		//if(!energyDistanceCut(clusterPair)) { continue pairLoop; }
-    		if(!pairEnergySlopeCut(clusterPair)) { continue pairLoop; }
-    		
-    		// Otherwise, note that it passed the cut.
-    		pairEnergySlopeCount++;
-    		
-    		// ==== Pair Coplanarity Cut ===================================
-    		// =============================================================
-    		// If the cluster fails the cut, skip to the next pair.
-    		if(!pairCoplanarityCut(clusterPair)) { continue pairLoop; }
-    		
-    		// Otherwise, note that it passed the cut.
-    		pairCoplanarityCount++;
-    		
-    		// Get the cluster plot values.
-    		int[] hitCount = new int[2];
-    		double[] seedEnergy = new double[2];
-    		double[] clusterEnergy = new double[2];
-    		int[] ix = new int[2];
-    		int[] iy = new int[2];
-    		for(int i = 0; i < 2; i++) {
-    			hitCount[i] = clusterPair[i].getCalorimeterHits().size();
-    			seedEnergy[i] = clusterPair[i].getSeedHit().getCorrectedEnergy();
-    			clusterEnergy[i] = clusterPair[i].getEnergy();
-    			ix[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("ix");
-    			iy[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("iy");
-    			if(ix[i] > 0) { ix[i] = ix[i] - 1; }
-    		}
-    		
-    		// Fill the general plots.
-    		for(int i = 0; i < 2; i++) {
-	    		clusterSeedEnergyAll.fill(seedEnergy[i], 1);
-	    		clusterTotalEnergyAll.fill(clusterEnergy[i], 1);
-	    		clusterHitCountAll.fill(hitCount[i], 1);
-	    		clusterDistributionAll.fill(ix[i], iy[i], 1);
-    		}
-    		
-    		// Fill the "passed all cuts" plots.
-    		pairEnergySumAll.fill(energySum, 1);
-    		pairEnergyDifferenceAll.fill(energyDifference, 1);
-    		pairEnergySlopeAll.fill(energySlope, 1);
-    		pairCoplanarityAll.fill(coplanarity, 1);
-    		
-    		// Clusters that pass all of the pair cuts produce a trigger.
-    		return true;
+            // Increment the number of processed cluster pairs.
+            allPairs++;
+            
+            // Get the plot values for the pair cuts.
+            double energySum = getValueEnergySum(clusterPair);
+            double energyDifference = getValueEnergyDifference(clusterPair);
+            double energySlope = getValueEnergySlope(clusterPair);
+            double coplanarity = getValueCoplanarity(clusterPair);
+            
+            // Fill the general plots.
+            pairEnergySum.fill(energySum, 1);
+            pairEnergyDifference.fill(energyDifference, 1);
+            pairEnergySlope.fill(energySlope, 1);
+            pairCoplanarity.fill(coplanarity, 1);
+            
+            // ==== Pair Energy Sum Cut ====================================
+            // =============================================================
+            // If the cluster fails the cut, skip to the next pair.
+            if(!pairEnergySumCut(clusterPair)) { continue pairLoop; }
+            
+            // Otherwise, note that it passed the cut.
+            pairEnergySumCount++;
+            
+            // ==== Pair Energy Difference Cut =============================
+            // =============================================================
+            // If the cluster fails the cut, skip to the next pair.
+            if(!pairEnergyDifferenceCut(clusterPair)) { continue pairLoop; }
+            
+            // Otherwise, note that it passed the cut.
+            pairEnergyDifferenceCount++;
+            
+            // ==== Pair Energy Slope Cut ==================================
+            // =============================================================
+            // If the cluster fails the cut, skip to the next pair.
+            //if(!energyDistanceCut(clusterPair)) { continue pairLoop; }
+            if(!pairEnergySlopeCut(clusterPair)) { continue pairLoop; }
+            
+            // Otherwise, note that it passed the cut.
+            pairEnergySlopeCount++;
+            
+            // ==== Pair Coplanarity Cut ===================================
+            // =============================================================
+            // If the cluster fails the cut, skip to the next pair.
+            if(!pairCoplanarityCut(clusterPair)) { continue pairLoop; }
+            
+            // Otherwise, note that it passed the cut.
+            pairCoplanarityCount++;
+            
+            // Get the cluster plot values.
+            int[] hitCount = new int[2];
+            double[] seedEnergy = new double[2];
+            double[] clusterEnergy = new double[2];
+            int[] ix = new int[2];
+            int[] iy = new int[2];
+            for(int i = 0; i < 2; i++) {
+                hitCount[i] = clusterPair[i].getCalorimeterHits().size();
+                seedEnergy[i] = clusterPair[i].getSeedHit().getCorrectedEnergy();
+                clusterEnergy[i] = clusterPair[i].getEnergy();
+                ix[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("ix");
+                iy[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("iy");
+                if(ix[i] > 0) { ix[i] = ix[i] - 1; }
+            }
+            
+            // Fill the general plots.
+            for(int i = 0; i < 2; i++) {
+                clusterSeedEnergyAll.fill(seedEnergy[i], 1);
+                clusterTotalEnergyAll.fill(clusterEnergy[i], 1);
+                clusterHitCountAll.fill(hitCount[i], 1);
+                clusterDistributionAll.fill(ix[i], iy[i], 1);
+            }
+            
+            // Fill the "passed all cuts" plots.
+            pairEnergySumAll.fill(energySum, 1);
+            pairEnergyDifferenceAll.fill(energyDifference, 1);
+            pairEnergySlopeAll.fill(energySlope, 1);
+            pairCoplanarityAll.fill(coplanarity, 1);
+            
+            // Clusters that pass all of the pair cuts produce a trigger.
+            return true;
         }
         
         // If the loop terminates without producing a trigger, there
-    	// are no cluster pairs which meet the trigger conditions.
+        // are no cluster pairs which meet the trigger conditions.
         return false;
     }
     
@@ -862,14 +872,14 @@
      * @param clusterList - The clusters to add to the queues.
      */
     private void updateClusterQueues(List<HPSEcalCluster> clusterList) {
-    	// Create lists to store the top and bottom clusters.
+        // Create lists to store the top and bottom clusters.
         ArrayList<HPSEcalCluster> topClusterList = new ArrayList<HPSEcalCluster>();
         ArrayList<HPSEcalCluster> botClusterList = new ArrayList<HPSEcalCluster>();
         
         // Loop over the clusters in the cluster list.
         for (HPSEcalCluster cluster : clusterList) {
-        	// If the cluster is on the top of the calorimeter, it
-        	// goes into the top cluster list.
+            // If the cluster is on the top of the calorimeter, it
+            // goes into the top cluster list.
             if (cluster.getSeedHit().getIdentifierFieldValue("iy") > 0) {
                 topClusterList.add(cluster);
             }
@@ -886,4 +896,4 @@
         topClusterQueue.remove();
         botClusterQueue.remove();
     }
-}^M
+}
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/src/main/java/org/hps/readout/ecal
NeutralPionTriggerDriver.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/src/main/java/org/hps/readout/ecal/NeutralPionTriggerDriver.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/ecal-readout-sim/src/main/java/org/hps/readout/ecal/NeutralPionTriggerDriver.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -48,1034 +48,1034 @@
  * @author Michel Garçon
  */
 public class NeutralPionTriggerDriver extends TriggerDriver {
-	
-	// ==================================================================
-	// ==== Trigger Algorithms ==========================================
-	// ==================================================================	
-	
+    
+    // ==================================================================
+    // ==== Trigger Algorithms ==========================================
+    // ==================================================================    
+    
     @Override
     public void endOfData() {
-    	// Print out the results of the trigger cuts.
-    	System.out.printf("Trigger Processing Results%n");
-    	System.out.printf("\tSingle-Cluster Cuts%n");
-    	System.out.printf("\t\tTotal Clusters Processed     :: %d%n", allClusters);
-    	System.out.printf("\t\tPassed Seed Energy Cut       :: %d%n", clusterSeedEnergyCount);
-    	System.out.printf("\t\tPassed Hit Count Cut         :: %d%n", clusterHitCountCount);
-    	if(rejectEdgeCrystals) {
-    		System.out.printf("\t\tPassed Edge Crystal Cut      :: %d%n", clusterEdgeCount);
-    	}
-    	System.out.printf("%n");
-    	System.out.printf("\tCluster Pair Cuts%n");
-    	System.out.printf("\t\tTotal Pairs Processed        :: %d%n", allPairs);
-    	System.out.printf("\t\tPassed Energy Sum Cut        :: %d%n", pairEnergySumCount);
-    	System.out.printf("\t\tPassed Energy Invariant Mass :: %d%n", pairInvariantMassCount);
-    	System.out.printf("%n");
-    	System.out.printf("\tTrigger Count :: %d%n", triggers);
-    	
-    	// Run the superclass method.
+        // Print out the results of the trigger cuts.
+        System.out.printf("Trigger Processing Results%n");
+        System.out.printf("\tSingle-Cluster Cuts%n");
+        System.out.printf("\t\tTotal Clusters Processed     :: %d%n", allClusters);
+        System.out.printf("\t\tPassed Seed Energy Cut       :: %d%n", clusterSeedEnergyCount);
+        System.out.printf("\t\tPassed Hit Count Cut         :: %d%n", clusterHitCountCount);
+        if(rejectEdgeCrystals) {
+            System.out.printf("\t\tPassed Edge Crystal Cut      :: %d%n", clusterEdgeCount);
+        }
+        System.out.printf("%n");
+        System.out.printf("\tCluster Pair Cuts%n");
+        System.out.printf("\t\tTotal Pairs Processed        :: %d%n", allPairs);
+        System.out.printf("\t\tPassed Energy Sum Cut        :: %d%n", pairEnergySumCount);
+        System.out.printf("\t\tPassed Energy Invariant Mass :: %d%n", pairInvariantMassCount);
+        System.out.printf("%n");
+        System.out.printf("\tTrigger Count :: %d%n", triggers);
+        
+        // Run the superclass method.
         super.endOfData();
     }
-	
-	public void process(EventHeader event) {
-		// Generate a temporary list to store the good clusters
-		// in before they are added to the buffer.
-		List<HPSEcalCluster> tempList = new ArrayList<HPSEcalCluster>();
-		
-		// If the current event has a cluster collection, get it.
-		if(event.hasCollection(HPSEcalCluster.class, clusterCollectionName)) {
-			// VERBOSE :: Note that a cluster collection exists for
-			//            this event.
-			if(verbose) { System.out.println("Cluster collection is present for event."); }
-			
-			// Get the cluster list from the event.
-			List<HPSEcalCluster> eventList = event.get(HPSEcalCluster.class, clusterCollectionName);
-			
-			// VERBOSE :: Output the number of extant clusters.
-			if(verbose) { System.out.printf("%d clusters in event.%n", eventList.size()); }
-			
-			// Add the clusters from the event into the cluster list
-			// if they pass the minimum total cluster energy and seed
-			// energy thresholds.
-			for(HPSEcalCluster cluster : eventList) {
-				// Increment the clusters processed count.
-				allClusters++;
-				
-				// Plot the seed energy / cluster energy histogram.
-				seedPercent.fill(cluster.getSeedHit().getCorrectedEnergy() / cluster.getEnergy(), 1);
-				
-				// Get the cluster position indices.
-				int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
-				int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
-				
-				// VERBOSE :: Output the current cluster's properties.
-				if(verbose) {
-					System.out.printf("\tTesting cluster at (%d, %d) with total energy %f and seed energy %f.%n",
-							ix, iy, cluster.getSeedHit().getCorrectedEnergy(), cluster.getEnergy());
-				}
-				
-				// Add the clusters to the uncut histograms.
-				clusterHitCount.fill(cluster.getCalorimeterHits().size());
-				clusterTotalEnergy.fill(cluster.getEnergy());
-				clusterSeedEnergy.fill(cluster.getSeedHit().getCorrectedEnergy());
-				clusterDistribution.fill(ix, iy, 1);
-				
-				// VERBOSE :: Output the single cluster trigger thresholds.
-				if(verbose) {
-					System.out.printf("\tCluster seed energy threshold  :: [%f, %f]%n", clusterSeedEnergyThresholdLow, clusterSeedEnergyThresholdHigh);
-					System.out.printf("\tCluster total energy threshold :: %f%n%n", clusterTotalEnergyThresholdLow);
-				}
-				
-				// Perform the single cluster cuts.
-				boolean totalEnergyCut = clusterTotalEnergyCut(cluster);
-				boolean seedEnergyCut = clusterSeedEnergyCut(cluster);
-				boolean hitCountCut = clusterHitCountCut(cluster);
-				boolean edgeCrystalCut = isEdgeCluster(cluster);
-				
-				// Increment the single cut counts.
-				if(seedEnergyCut) {
-					clusterSeedEnergyCount++;
-					if(hitCountCut) {
-						clusterHitCountCount++;
-						if(rejectEdgeCrystals && edgeCrystalCut) {
-							clusterEdgeCount++;
-						}
-					}
-				}
-				
-				// VERBOSE :: Note whether the cluster passed the single
-				//            cluster cuts.
-				if(verbose) {
-					System.out.printf("\tPassed seed energy cut    :: %b%n", seedEnergyCut);
-					System.out.printf("\tPassed cluster energy cut :: %b%n%n", totalEnergyCut);
-					System.out.printf("\tPassed hit count cut :: %b%n%n", hitCountCut);
-					System.out.printf("\tIs an edge cluster :: %b%n%n", edgeCrystalCut);
-				}
-				
-				// Determine whether the cluster passes all the single
-				// cluster cuts.
-				boolean passedCuts = false;
-				
-				// If edge crystals should be not be used for triggering,
-				// require that the cluster not be centered in an edge
-				// crystal.
-				if(rejectEdgeCrystals) {
-					if(totalEnergyCut && seedEnergyCut && hitCountCut && !edgeCrystalCut) {
-						passedCuts = true;
-					}
-				}
-				
-				// Otherwise, it just needs to pass the standard trigger
-				// cuts regardless of where it is located.
-				else {
-					if(totalEnergyCut && seedEnergyCut && hitCountCut) {
-						passedCuts = true;
-					}
-				}
-				
-				// If both pass, add the cluster to the list.
-				if(passedCuts) {
-					// Add the cluster to the cluster list.
-					tempList.add(cluster);
-					
-					// Add the cluster information to the single cut histograms.
-					pClusterHitCount.fill(cluster.getCalorimeterHits().size());
-					pClusterTotalEnergy.fill(cluster.getEnergy());
-					pClusterSeedEnergy.fill(cluster.getSeedHit().getCorrectedEnergy());
-					pClusterDistribution.fill(ix, iy, 1);
-				}
-			}
-			
-			// Remove the oldest cluster buffer element and add the new
-			// cluster list to the buffer.
-			clusterBuffer.removeFirst();
-			clusterBuffer.addLast(tempList);
-		}
-		
-		// Otherwise, clear the cluster list.
-		else {
-			// VERBOSE :: Note that the event has no clusters.
-			if(verbose) { System.out.println("No cluster collection is present for event.\n"); }
-		}
-		
-		// Reset the highest energy pair to null.
-		clusterTriplet[0] = null;
-		clusterTriplet[1] = null;
-		clusterTriplet[2] = null;
-		
-		// Loop over all of the cluster lists in the cluster buffer.
-		double[] energy = { 0.0, 0.0, 0.0 };
-		for(List<HPSEcalCluster> bufferList : clusterBuffer) {
-			// Loop over all of the clusters in each buffer list.
-			for(HPSEcalCluster cluster : bufferList) {
-				// If the new cluster is higher energy than the first
-				// slot cluster, move the subsequent clusters down and
-				// insert the new one.
-				if(cluster.getEnergy() > energy[0]) {
-					clusterTriplet[2] = clusterTriplet[1];
-					clusterTriplet[1] = clusterTriplet[0];
-					clusterTriplet[0] = cluster;
-					energy[2] = energy[1];
-					energy[1] = energy[0];
-					energy[0] = cluster.getEnergy();
-				}
-				
-				// Otherwise, if the new cluster has more energy than
-				// the second slot, it goes there and the second does
-				// to the third.
-				else if(cluster.getEnergy() > energy[1]) {
-					clusterTriplet[2] = clusterTriplet[1];
-					clusterTriplet[1] = cluster;
-					energy[2] = energy[1];
-					energy[1] = cluster.getEnergy();
-				}
-				
-				// If the new cluster has more energy than the third
-				// cluster, it just replaces it.
-				else if(cluster.getEnergy() > energy[2]) {
-					clusterTriplet[2] = cluster;
-					energy[2] = cluster.getEnergy();
-				}
-			}
-		}
-		
-		// The highest energy pair is the same as the first two slots
-		// of the highest energy triplet.
-		clusterPair[0] = clusterTriplet[0];
-		clusterPair[1] = clusterTriplet[1];
-		
-		// Run the superclass event process.
-		super.process(event);
-	}
-	
-	public void startOfData() {
-		// Initialize the cluster buffer to the size of the coincidence window.
-		clusterBuffer = new LinkedList<List<HPSEcalCluster>>();
-		
-		// Populate the buffer with empty lists.
-		for(int i = 0; i < coincidenceWindow; i++) {
-			clusterBuffer.add(new ArrayList<HPSEcalCluster>(0));
-		}
-		
-		// Initialize the cluster hit count diagnostic plots.
-		clusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution", 9, 1, 10);
-		pClusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed Single Cuts)", 9, 1, 10);
-		aClusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed All Cuts)", 9, 1, 10);
-		
-		// Initialize the cluster total energy diagnostic plots.
-		clusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution", 176, 0.0, 2.2);
-		pClusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
-		aClusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
-		
-		// Initialize the cluster seed energy diagnostic plots.
-		clusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution", 176, 0.0, 2.2);
-		pClusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
-		aClusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
-		
-		// Initialize the seed distribution diagnostic plots.
-		clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 44, -22.0, 22.0, 10, -5, 5);
-		pClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 44, -23, 23, 11, -5.5, 5.5);
-		aClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 44, -23, 23, 11, -5.5, 5.5);
-		
-		// Initialize the cluster pair energy sum diagnostic plots.
-		pairEnergySum = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution", 176, 0.0, 2.2);
-		pPairEnergySum = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution (Passed Pair Cuts)", 176, 0.0, 2.2);
-		
-		// Initialize the cluster pair hypothetical invariant mass diagnostic plots.
-		invariantMass = aida.histogram1D("Trigger Plots :: Invariant Mass Distribution", 1500, 0.0, 0.03);
-		pInvariantMass = aida.histogram1D("Trigger Plots :: Invariant Mass Distribution (Passed Pair Cuts)", 1500, 0.0, 0.03);
-		
-		// Initialize the seed percentage of cluster energy.
-		seedPercent = aida.histogram1D("Analysis Plots :: Seed Percentage of Total Energy", 400, 0.0, 1.0);
-	}
-	
-	protected boolean triggerDecision(EventHeader event) {
-		// If the active cluster pair has a null value, then there were
-		// fewer than two clusters in the buffer and we can not trigger.
-		if(!useClusterTriplet && (clusterPair[0] == null || clusterPair[1] == null)) {
-			// VERBOSE :: Note that triggering failed due to insufficient
-			// clusters. in the cluster buffer.
-			if(verbose) { System.out.println("Inufficient clusters in buffer -- no trigger."); }
-			
-			// Return false; we can not trigger without two clusters.
-			return false;
-		}
-		
-		// If the active cluster triplet has a null value, then there
-		// were fewer than three clusters in the buffer and we can not
-		// trigger.
-		if(useClusterTriplet && (clusterTriplet[0] == null || clusterTriplet[1] == null || clusterTriplet[2] == null)) {
-			// VERBOSE :: Note that triggering failed due to insufficient
-			// clusters. in the cluster buffer.
-			if(verbose) { System.out.println("Inufficient clusters in buffer -- no trigger."); }
-			
-			// Return false; we can not trigger without three clusters.
-			return false;
-		}
-		
-		// Increment the number of pairs considered.
-		allPairs++;
-		
-		// Get the cluster position indices.
-		int[] ix = { clusterPair[0].getSeedHit().getIdentifierFieldValue("ix"), clusterPair[1].getSeedHit().getIdentifierFieldValue("ix") };
-		int[] iy = { clusterPair[0].getSeedHit().getIdentifierFieldValue("iy"), clusterPair[1].getSeedHit().getIdentifierFieldValue("iy") };
-		
-		// VERBOSE :: Output the clusters selected for triggering.
-		if(verbose) {
-			System.out.printf("\tTesting first cluster at (%d, %d) with total energy %f and seed energy %f.%n",
-					ix[0], iy[0], clusterPair[0].getSeedHit().getCorrectedEnergy(), clusterPair[0].getEnergy());
-			System.out.printf("\tTesting second cluster at (%d, %d) with total energy %f and seed energy %f.%n",
-					ix[1], iy[1], clusterPair[1].getSeedHit().getCorrectedEnergy(), clusterPair[1].getEnergy());
-			if(useClusterTriplet) {
-				System.out.printf("\tTesting third cluster at (%d, %d) with total energy %f and seed energy %f.%n",
-						ix[1], iy[1], clusterTriplet[2].getSeedHit().getCorrectedEnergy(), clusterTriplet[2].getEnergy());
-			}
-		}
-		
-		if(!useClusterTriplet) {
-			// Fill the uncut histograms.
-			pairEnergySum.fill(getEnergySumValue(clusterPair));
-			invariantMass.fill(getInvariantMassValue(clusterPair));
-			
-			// VERBOSE :: Output the cluster pair trigger thresholds.
-			if(verbose) {
-				System.out.printf("\tCluster pair energy sum threshold     :: %f%n", pairEnergySumThresholdLow);
-				System.out.printf("\tHypothetical invariant mass threshold :: [%f, %f]%n%n", invariantMassThresholdLow, invariantMassThresholdHigh);
-			}
-			
-			// Perform the cluster pair checks.
-			boolean energySumCut = pairEnergySumCut(clusterPair);
-			boolean invariantMassCut = pairInvariantMassCut(clusterPair);
-			
-			// Increment the pair cut counts.
-			if(energySumCut) {
-				pairEnergySumCount++;
-				if(invariantMassCut) {
-					pairInvariantMassCount++;
-				}
-			}
-			
-			// VERBOSE :: Note the outcome of the trigger cuts.
-			if(verbose) {
-				System.out.printf("\tPassed energy sum cut     :: %b%n", energySumCut);
-				System.out.printf("\tPassed invariant mass cut :: %b%n%n", invariantMassCut);
-			}
-			
-			// If the pair passes both cuts, we have a trigger.
-			if(energySumCut && invariantMassCut) {
-				// Fill the cut histograms.
-				pPairEnergySum.fill(getEnergySumValue(clusterPair));
-				pInvariantMass.fill(getInvariantMassValue(clusterPair));
-				
-				// Fill the all cuts histograms.
-				aClusterHitCount.fill(clusterPair[0].getCalorimeterHits().size());
-				aClusterHitCount.fill(clusterPair[1].getCalorimeterHits().size());
-				aClusterTotalEnergy.fill(clusterPair[0].getEnergy());
-				aClusterTotalEnergy.fill(clusterPair[1].getEnergy());
-				aClusterSeedEnergy.fill(clusterPair[0].getSeedHit().getCorrectedEnergy());
-				aClusterSeedEnergy.fill(clusterPair[1].getSeedHit().getCorrectedEnergy());
-				aClusterDistribution.fill(ix[0], iy[0], 1);
-				aClusterDistribution.fill(ix[1], iy[1], 1);
-				
-				// VERBOSE :: Note that the event has triggered.
-				if(verbose) { System.out.println("Event triggers!\n\n"); }
-				
-				// Increment the number of triggers.
-				triggers++;
-				
-				// Return the trigger.
-				return true;
-			}
-		}
-		
-		// If we are using a cluster triplet, apply the cluster triplet
-		// cuts.
-		else {
-			// Perform the cluster triplet checks.
-			boolean energySumCut = tripletEnergySumCut(clusterTriplet);
-			boolean horizontalCut = tripletHorizontalCut(clusterTriplet);
-			boolean energySpatialCut = tripletTotalEnergyCut(clusterTriplet);
-			
-			// Fill the all cuts histograms.
-			aClusterHitCount.fill(clusterPair[0].getCalorimeterHits().size());
-			aClusterHitCount.fill(clusterPair[1].getCalorimeterHits().size());
-			aClusterTotalEnergy.fill(clusterPair[0].getEnergy());
-			aClusterTotalEnergy.fill(clusterPair[1].getEnergy());
-			aClusterSeedEnergy.fill(clusterPair[0].getSeedHit().getCorrectedEnergy());
-			aClusterSeedEnergy.fill(clusterPair[1].getSeedHit().getCorrectedEnergy());
-			aClusterDistribution.fill(ix[0], iy[0], 1);
-			aClusterDistribution.fill(ix[1], iy[1], 1);
-			
-			if(energySumCut && horizontalCut && energySpatialCut) {
-				return true;
-			}
-		}
-		
-		// VERBOSE :: Note that the event has failed to trigger.
-		if(verbose) { System.out.println("No trigger.\n\n"); }
-		
-		// If one or more of the pair cuts failed, the we do not trigger.
-		return false;
-	}
-	
-	// ==================================================================
-	// ==== Trigger Cut Methods =========================================
-	// ==================================================================
-	
-	/**
-	 * Checks whether the cluster passes the threshold for minimum
-	 * number of component hits.
-	 * @param cluster - The cluster to check.
-	 * @return Returns <code>true</code> if the cluster passes and <code>
-	 * false</code> if it does not.
-	 */
-	private boolean clusterHitCountCut(HPSEcalCluster cluster) {
-		return cluster.getCalorimeterHits().size() >= clusterHitCountThreshold;
-	}
-	
-	/**
-	 * Checks whether the cluster falls within the allowed range for
-	 * the seed hit energy cut.
-	 * @param cluster - The cluster to check.
-	 * @return Returns <code>true</code> if the cluster passes and <code>
-	 * false</code> if it does not.
-	 */
-	private boolean clusterSeedEnergyCut(HPSEcalCluster cluster) {
-		// Get the seed energy value.
-		double seedEnergy = cluster.getSeedHit().getCorrectedEnergy();
-		
-		// Perform the seed energy cut.
-		return seedEnergy >= clusterSeedEnergyThresholdLow && seedEnergy <= clusterSeedEnergyThresholdHigh;
-	}
-	
-	/**
-	 * Checks whether the cluster passes the threshold for minimum
-	 * total cluster energy.
-	 * @param cluster - The cluster to check.
-	 * @return Returns <code>true</code> if the cluster passes and <code>
-	 * false</code> if it does not.
-	 */
-	private boolean clusterTotalEnergyCut(HPSEcalCluster cluster) {
-		// Get the cluster energy.
-		double clusterEnergy = cluster.getEnergy();
-		
-		// Perform the cut.
-		return clusterEnergy >= clusterTotalEnergyThresholdLow && clusterEnergy <= clusterTotalEnergyThresholdHigh;
-	}
-	
-	/**
-	 * Calculates the value used in the pair energy sum cut from a pair
-	 * of two clusters.
-	 * @param clusterPair - The cluster pair from which to derive the
-	 * cut value.
-	 * @return Returns the cut value as a <code>double</code>.
-	 */
-	private static double getEnergySumValue(HPSEcalCluster[] clusterGroup) {
-		// Track the sum.
-		double energySum = 0.0;
-		
-		// Add the energies of all clusters in the array.
-		for(HPSEcalCluster cluster : clusterGroup) { energySum += cluster.getEnergy(); }
-		
-		// Return the sum.
-		return energySum;
-	}
-	
-	/**
-	 * Calculates the value used in the invariant mass cut from a pair
-	 * of two clusters.
-	 * @param clusterPair - The cluster pair from which to derive the
-	 * cut value.
-	 * @return Returns the cut value as a <code>double</code>.
-	 */
-	private double getInvariantMassValue(HPSEcalCluster[] clusterPair) {
-		// Store the x/y positions for the seeds.
-		double x[] = new double[2];
-		double y[] = new double[2];
-		
-		// Get the seed hits.
-		CalorimeterHit[] seed = { clusterPair[0].getSeedHit(), clusterPair[1].getSeedHit() };
-		
-		// Set the positions for each seed.
-		for(int index = 0; index < seed.length; index++) {
-			// Get the seed position array stored in the position map.
-			Double[] seedPos = seedPosMap.get(clusterPair[index].getSeedHit());
-			
-			// If there is a position array for the seed, use it.
-			if(seedPos != null) {
-				x[index] = seedPos[0];
-				y[index] = seedPos[1];
-			}
-			
-			// Otherwise, calculate the position at the crystal face.
-			else {
-				// Get the position and store it in a double array.
-				IGeometryInfo geom = clusterPair[index].getSeedHit().getDetectorElement().getGeometry();
-				double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),
-						(Hep3Vector) new BasicHep3Vector(0, 0, -1 * ((Trd) geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
-				
-				// Set the seed location.
-				x[index] = pos[0];
-				y[index] = pos[1];
-				
-				// Store the seed location for future use.
-				Double[] positionVec = { pos[0], pos[1], pos[2] };
-				seedPosMap.put(clusterPair[index].getSeedHit(), positionVec);
-			}
-		}
-		
-		// Get the cluster energy for each seed.
-		double[] e = { clusterPair[0].getEnergy(), clusterPair[1].getEnergy() };
-		
-		//Return the invariant mass.
-		return (e[0] * e[1] * (Math.pow(x[0] - x[1], 2) + Math.pow(y[0] - y[1], 2)) / D2);
-	}
-	
-	/**
-	 * Indicates whether a cluster has a seed hit located on the edge
-	 * of the calorimeter or not.
-	 * 
-	 * @param cluster - The cluster to check.
-	 * @return Returns <code>true</code> if the cluster seed is on the
-	 * edge of the calorimeter and <code>false</code> otherwise.
-	 */
-	private static boolean isEdgeCluster(HPSEcalCluster cluster) {
-		// Get the x- and y-indices of the cluster seed hit.
-		int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
-		int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
-		
-		// Track whether the cluster is an edge cluster or not.
-    	boolean edge = false;
-    	
-    	// Get the absolute values of the coordinates.
-    	int aix = Math.abs(ix);
-    	int aiy = Math.abs(iy);
-    	
-    	// Check if this an outer edge crystal.
-    	if(aix == 23 || aiy == 5) { edge = true; }
-    	
-    	// Check if this along the central beam gap.
-    	if(aiy == 1) { edge = true; }
-    	
-    	// Check if this is around the beam gap.
-    	if(aiy == 2 && (ix >= -11 && ix <= -1)) { edge = true; }
-    	
-    	// Otherwise, this is not an edge crystal.
-    	return edge;
-	}
-	
-	/**
-	 * Checks whether the cluster pair passes the falls within the
-	 * allowed range for the piar energy sum cut.
-	 * @param clusterPair - An array of size two containing the cluster
-	 * pair to check.
-	 * @return Returns <code>true</code> if the clusters pass and <code>
-	 * false</code> if they does not.
-	 */
-	private boolean pairEnergySumCut(HPSEcalCluster[] clusterPair) {
-		// Get the energy sum value.
-		double energySum = getEnergySumValue(clusterPair);
-		
-		// Otherwise, get the energy sum and compare it to the threshold.
-		return energySum >= pairEnergySumThresholdLow && energySum <= pairEnergySumThresholdHigh;
-	}
-	
-	/**
-	 * Checks whether the cluster pair passes the threshold for the
-	 * invariant mass check.
-	 * @param clusterPair - An array of size two containing the cluster
-	 * pair to check.
-	 * @return Returns <code>true</code> if the clusters pass and <code>
-	 * false</code> if they does not.
-	 */
-	private boolean pairInvariantMassCut(HPSEcalCluster[] clusterPair) {
-		// Calculate the invariant mass.
-		double myy2 = getInvariantMassValue(clusterPair);
-		
-		// Perform the cut.
-		return ( (myy2 >= invariantMassThresholdLow) && (myy2 <= invariantMassThresholdHigh));
-	}
-	
-	/**
-	 * Checks whether the cluster pair passes the threshold for the
-	 * minimum pair energy sum check.
-	 * @param clusterTriplet - An array of size three containing the
-	 * cluster triplet to check.
-	 * @return Returns <code>true</code> if the clusters pass and <code>
-	 * false</code> if they does not.
-	 */
-	private boolean tripletEnergySumCut(HPSEcalCluster[] clusterTriplet) {
-		return (getEnergySumValue(clusterTriplet) >= tripletEnergySumThreshold);
-	}
-	
-	/**
-	 * Checks that there is at least one cluster is located on the right
-	 * side and at least one cluster on the left side of the calorimeter.
-	 * @param clusterTriplet - An array of size three containing the
-	 * cluster triplet to check.
-	 * @return Returns <code>true</code> if the clusters pass and <code>
-	 * false</code> if they does not.
-	 */
-	private static boolean tripletHorizontalCut(HPSEcalCluster[] clusterTriplet) {
-		// Track whether a cluster has occurred on each horizontal side
-		// of the calorimeter.
-		boolean leftCluster = false;
-		boolean rightCluster = false;
-		
-		// Sort through the cluster triplet and check where they occur.
-		for(HPSEcalCluster cluster : clusterTriplet) {
-			int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
-			if(ix < 0) { leftCluster = true; }
-			if(ix > 0) { rightCluster = true; }
-		}
-		
-		// If a cluster fell on both sides, it passes.
-		if(leftCluster && rightCluster) { return true; }
-		else { return false; }
-	}
-	
-	private boolean tripletTotalEnergyCut(HPSEcalCluster[] clusterTriplet) {
-		// Check to see if each cluster passes the check.
-		for(HPSEcalCluster cluster1 : clusterTriplet) {
-			for(HPSEcalCluster cluster2 : clusterTriplet) {
-				// The cluster pair must be two different clusters.
-				if(cluster1 == cluster2) { continue; }
-				
-				// Check to see if the clusters are over threshold.
-				boolean over1 = cluster1.getEnergy() >= tripletTotalEnergyThreshold;
-				boolean over2 = cluster1.getEnergy() >= tripletTotalEnergyThreshold;
-				
-				// If both the clusters are over threshold, check that
-				// they are sufficiently far apart.
-				if(over1 && over2) {
-					// Get the x and y coordinates of the clusters.
-					double x[] = { cluster1.getPosition()[0], cluster2.getPosition()[0] };
-					double y[] = { cluster1.getPosition()[1], cluster2.getPosition()[1] };
-					
-					// Calculate the distance between the clusters.
-					double dr = Math.sqrt(x[0] * x[0] + y[0] * y[0]);
-					
-					// Run the check.
-					if(dr >= tripletPairSeparationThreshold) { return true; }
-				}
-			}
-		}
-		
-		// If none of the cluster pairs pass all the checks, the
-		// triplet fails.
-		return false;
-	}
-	
-	// ==================================================================
-	// ==== Variables Mutator Methods ===================================
-	// ==================================================================
-	
-	/**
-	 * Sets the LCIO collection name where <code>HPSEcalCluster</code>
-	 * objects are stored for use in the trigger.
-	 * @param clusterCollectionName - The name of the LCIO collection.
-	 */
-	public void setClusterCollectionName(String clusterCollectionName) {
-		this.clusterCollectionName = clusterCollectionName;
-	}
-	
-	/**
-	 * Sets the minimum number of hits required for a cluster to be
-	 * used in triggering.
-	 * @param clusterHitCountThreshold - The smallest number of hits
-	 * in a cluster.
-	 */
-	public void setClusterHitCountThreshold(int clusterHitCountThreshold) {
-		this.clusterHitCountThreshold = clusterHitCountThreshold;
-	}
-	
-	/**
-	 * Sets the threshold for the cluster seed energy of individual
-	 * clusters above which the cluster will be rejected and not used
-	 * for triggering.
-	 * @param clusterSeedEnergyThresholdHigh - The cluster seed energy
-	 * lower bound.
-	 */
-	public void setClusterSeedEnergyThresholdHigh(double clusterSeedEnergyThresholdHigh) {
-		this.clusterSeedEnergyThresholdHigh = clusterSeedEnergyThresholdHigh;
-	}
-	
-	/**
-	 * Sets the threshold for the cluster seed energy of individual
-	 * clusters under which the cluster will be rejected and not used
-	 * for triggering.
-	 * @param clusterSeedEnergyThresholdLow - The cluster seed energy
-	 * lower bound.
-	 */
-	public void setClusterSeedEnergyThresholdLow(double clusterSeedEnergyThresholdLow) {
-		this.clusterSeedEnergyThresholdLow = clusterSeedEnergyThresholdLow;
-	}
-	
-	/**
-	 * Sets the threshold for the total cluster energy of individual
-	 * clusters under which the cluster will be rejected and not used
-	 * for triggering.
-	 * @param clusterTotalEnergyThresholdLow - The cluster total energy
-	 * lower bound.
-	 */
-	public void setClusterTotalEnergyThresholdLow(double clusterTotalEnergyThresholdLow) {
-		this.clusterTotalEnergyThresholdLow = clusterTotalEnergyThresholdLow;
-	}
-	
-	/**
-	 * Sets the threshold for the total cluster energy of individual
-	 * clusters above which the cluster will be rejected and not used
-	 * for triggering.
-	 * @param clusterTotalEnergyThresholdHigh - The cluster total energy
-	 * upper bound.
-	 */
-	public void setClusterTotalEnergyThresholdHigh(double clusterTotalEnergyThresholdHigh) {
-		this.clusterTotalEnergyThresholdHigh = clusterTotalEnergyThresholdHigh;
-	}
-	
-	/**
-	 * Sets the number of events that clusters will be retained and
-	 * employed for triggering before they are cleared.
-	 * @param coincidenceWindow - The number of events that clusters
-	 * should be retained.
-	 */
-	public void setCoincidenceWindow(int coincidenceWindow) {
-		this.coincidenceWindow = coincidenceWindow;
-	}
-	
-	/**
-	 * Sets the invariant mass threshold to accept only cluster pairs
-	 * with a reconstructed invariant mass within a certain number of
-	 * standard deviations of the mean (corrected for sampling fraction).
-	 * @param invariantMassSigma - The number of standard deviations
-	 * within which a cluster pair invariant mass is accepted.
-	 */
-	public void setInvariantMassSigma(int invariantMassSigma) {
-		this.invariantMassThresholdLow = 0.012499 - (invariantMassSigma * 0.0011095);
-		this.invariantMassThresholdHigh = 0.012499 + (invariantMassSigma * 0.0011095);
-	}
-	
-	/**
-	 * Sets the threshold for the calculated invariant mass of the
-	 * generating particle (assuming that the clusters are produced
-	 * by a positron/electron pair) above which the cluster pair will
-	 * be rejected and not produce a trigger.
-	 * @param invariantMassThresholdHigh - The invariant mass upper
-	 * bound.
-	 */
-	public void setInvariantMassThresholdHigh(double invariantMassThresholdHigh) {
-		this.invariantMassThresholdHigh = invariantMassThresholdHigh;
-	}
-	
-	/**
-	 * Sets the threshold for the calculated invariant mass of the
-	 * generating particle (assuming that the clusters are produced
-	 * by a positron/electron pair) under which the cluster pair will
-	 * be rejected and not produce a trigger.
-	 * @param invariantMassThresholdLow - The invariant mass lower
-	 * bound.
-	 */
-	public void setInvariantMassThresholdLow(double invariantMassThresholdLow) {
-		this.invariantMassThresholdLow = invariantMassThresholdLow;
-	}
-	
-	/**
-	 * Sets the threshold for the sum of the energies of a cluster pair
-	 * above which the pair will be rejected and not produce a trigger.
-	 * @param pairEnergySumThresholdHigh - The cluster pair energy sum
-	 * upper bound.
-	 */
-	public void setPairEnergySumThresholdHigh(double pairEnergySumThresholdHigh) {
-		this.pairEnergySumThresholdHigh = pairEnergySumThresholdHigh;
-	}
-	
-	/**
-	 * Sets the threshold for the sum of the energies of a cluster pair
-	 * under which the pair will be rejected and not produce a trigger.
-	 * @param pairEnergySumThresholdLow - The cluster pair energy sum
-	 * lower bound.
-	 */
-	public void setPairEnergySumThresholdLow(double pairEnergySumThresholdLow) {
-		this.pairEnergySumThresholdLow = pairEnergySumThresholdLow;
-	}
-	
-	/**
-	 * Sets whether clusters centered on an edge crystal should be
-	 * used for triggering or not.
-	 * 
-	 * @param rejectEdgeCrystals - <code>true</code> means that edge
-	 * clusters will not be used and <code>false</code> means that they
-	 * will be used.
-	 */
-	public void setRejectEdgeCrystals(boolean rejectEdgeCrystals) {
-		this.rejectEdgeCrystals = rejectEdgeCrystals;
-	}
-	
-	/**
-	 * Sets the threshold for the sum of the energies of a cluster triplet
-	 * under which the triplet will be rejected and not produce a trigger.
-	 * @param tripletEnergySumThreshold - The cluster triplet energy sum
-	 * lower bound.
-	 */
-	public void setTripletEnergySumThreshold(double tripletEnergySumThreshold) {
-		this.tripletEnergySumThreshold = tripletEnergySumThreshold;
-	}
-	
-	/**
-	 * Sets the minimum distance apart for a cluster pair within a
-	 * cluster triplet. Clusters that are not sufficiently far apart
-	 * are rejected and do not trigger. 
-	 * @param tripletPairSeparationThreshold - The minimum distance in
-	 * millimeters.
-	 */
-	public void setTripletPairSeparationThreshold(double tripletPairSeparationThreshold) {
-		this.tripletPairSeparationThreshold = tripletPairSeparationThreshold;
-	}
-	
-	/**
-	 * Sets the threshold for which at least two clusters in a cluster
-	 * triplet will be required to surpass. Cluster triplets with one
-	 * or fewer clusters above the threshold will be rejected.
-	 * @param tripletTotalEnergyThreshold - The cluster total energy
-	 * that two clusters must pass.
-	 */
-	public void setTripletTotalEnergyThreshold(double tripletTotalEnergyThreshold) {
-		this.tripletTotalEnergyThreshold = tripletTotalEnergyThreshold;
-	}
-	
-	/**
-	 * Toggles whether the driver will output its actions to the console
-	 * during run time or not.
-	 * @param verbose - <code>true</code> indicates that the console
-	 * will write its actions and <code>false</code> that it will not.
-	 */
-	public void setVerbose(boolean verbose) {
-		this.verbose = verbose;
-	}
-	
-	/**
-	 * Toggles whether the driver triggers off of a pair of clusters
-	 * or a triplet of clusters.
-	 * @param useClusterTriplet - <code>true</code> indicates that a
-	 * triplet should be used and <code>false</code> that a pair should
-	 * be used.
-	 */
-	public void setUseClusterTriplet(boolean useClusterTriplet) {
-		this.useClusterTriplet = useClusterTriplet;
-	}
-	
-	// ==================================================================
-	// ==== AIDA Plots ==================================================
-	// ==================================================================
-	IHistogram2D aClusterDistribution;
-	IHistogram1D aClusterHitCount;
-	IHistogram1D aClusterSeedEnergy;
-	IHistogram1D aClusterTotalEnergy;
-	IHistogram2D clusterDistribution;
-	IHistogram1D clusterHitCount;
-	IHistogram1D clusterSeedEnergy;
-	IHistogram1D clusterTotalEnergy;
-	IHistogram1D invariantMass;
-	IHistogram1D pairEnergySum;
-	IHistogram1D pClusterHitCount;
-	IHistogram2D pClusterDistribution;
-	IHistogram1D pClusterSeedEnergy;
-	IHistogram1D pClusterTotalEnergy;
-	IHistogram1D pPairEnergySum;
-	IHistogram1D pInvariantMass;
-	IHistogram1D seedPercent;
-	
-	// ==================================================================
-	// ==== Variables ===================================================
-	// ==================================================================
-	
-	/**
-	 * <b>aida</b><br/><br/>
-	 * <code>private AIDA <b>aida</b></code><br/><br/>
-	 * Factory for generating histograms.
-	 */
-	private AIDA aida = AIDA.defaultInstance();
-	
-	/**
-	 * <b>clusterBuffer</b><br/><br/>
-	 * <code>private LinkedList<List<HPSEcalCluster>> <b>clusterBuffer</b></code><br/><br/>
-	 * Stores the list of clusters from each event for a finite-sized
-	 * buffer. The size of the buffer is determined by the coincidence
-	 * window.
-	 */
-	private LinkedList<List<HPSEcalCluster>> clusterBuffer;
-	
-	/**
-	 * <b>clusterCollectionName</b><br/><br/>
-	 * <code>private String <b>clusterCollectionName</b></code><br/><br/>
-	 * The name of the LCIO collection containing <code>HPSEcalCluster
-	 * </code> objects.
-	 */
-	private String clusterCollectionName = "EcalClusters";
-	
-	/**
-	 * <b>clusterPair</b><br/><br/>
-	 * <code>private HPSEcalCluster[] <b>clusterPair</b></code><br/><br/>
-	 * Stores the two highest energy clusters located in the cluster
-	 * buffer. These are sorted by energy, with the highest energy
-	 * cluster first in the array.
-	 */
-	private HPSEcalCluster[] clusterPair = new HPSEcalCluster[2];
-	
-	/**
-	 * <b>clusterHitCountThreshold</b><br/><br/>
-	 * <code>private int <b>clusterHitCountThreshold</b></code><br/><br/>
-	 * Defines the minimum number of hits required for a cluster to
-	 * be used in triggering.
-	 */
-	private int clusterHitCountThreshold = 5;
-	
-	/**
-	 * <b>clusterSeedEnergyThresholdLow</b><br/><br/>
-	 * <code>private double <b>clusterSeedEnergyThresholdLow</b></code><br/><br/>
-	 * Defines the threshold for the cluster seed energy under which
-	 * a cluster will be rejected.
-	 */
-	private double clusterSeedEnergyThresholdLow = 0.15;
-	
-	/**
-	 * <b>clusterSeedEnergyThresholdHigh</b><br/><br/>
-	 * <code>private double <b>clusterSeedEnergyThresholdHigh</b></code><br/><br/>
-	 * Defines the threshold for the cluster seed energy above which
-	 * a cluster will be rejected.
-	 */
-	private double clusterSeedEnergyThresholdHigh = 1.00;
-	
-	/**
-	 * <b>clusterTotalEnergyThresholdLow</b><br/><br/>
-	 * <code>private double <b>clusterTotalEnergyThreshold</b></code><br/><br/>
-	 * Defines the threshold for the total cluster energy under which
-	 * a cluster will be rejected.
-	 */
-	private double clusterTotalEnergyThresholdLow = 0.0;
-	
-	/**
-	 * <b>clusterTotalEnergyThresholdHigh</b><br/><br/>
-	 * <code>private double <b>clusterTotalEnergyThresholdHigh</b></code><br/><br/>
-	 * Defines the threshold for the total cluster energy above which
-	 * a cluster will be rejected.
-	 */
-	private double clusterTotalEnergyThresholdHigh = Double.MAX_VALUE;
-	
-	/**
-	 * <b>clusterTriplet</b><br/><br/>
-	 * <code>private HPSEcalCluster[] <b>clusterTriplet</b></code><br/><br/>
-	 * Stores the three highest energy clusters located in the cluster
-	 * buffer. These are sorted by energy, with the highest energy
-	 * cluster first in the array.
-	 */
-	private HPSEcalCluster[] clusterTriplet = new HPSEcalCluster[3]; 
-	
-	/**
-	 * <b>coincidenceWindow</b><br/><br/>
-	 * <code>private int <b>coincidenceWindow</b></code><br/><br/>
-	 * The number of events for which clusters will be retained and
-	 * used in the trigger before they are removed.
-	 */
-	private int coincidenceWindow = 3;
-	
-	/**
-	 * <b>D2</b><br/><br/>
-	 * <code>private static final double <b>D2</b></code><br/><br/>
-	 * The squared distance of the calorimeter from the target.
-	 */
-	private static final double D2 = 1414 * 1414; // (1414^2 mm^2)
-	
-	/**
-	 * <b>invariantMassThresholdHigh</b><br/><br/>
-	 * <code>private double <b>invariantMassThresholdHigh</b></code><br/><br/>
-	 * Defines the threshold for the invariant mass of the generating
-	 * particle above which the cluster pair will be rejected.
-	 */
-	private double invariantMassThresholdHigh = 0.01472;
-	
-	/**
-	 * <b>invariantMassThresholdLow</b><br/><br/>
-	 * <code>private double <b>invariantMassThresholdLow</b></code><br/><br/>
-	 * Defines the threshold for the invariant mass of the generating
-	 * particle below which the cluster pair will be rejected.
-	 */
-	private double invariantMassThresholdLow = 0.01028;
-	
-	/**
-	 * <b>pairEnergySumThresholdLow</b><br/><br/>
-	 * <code>private double <b>pairEnergySumThresholdLow</b></code><br/><br/>
-	 * Defines the threshold for the sum of the energies of a cluster
-	 * pair below which the pair will be rejected.
-	 */
-	private double pairEnergySumThresholdLow = 1.5;
-	
-	/**
-	 * <b>pairEnergySumThresholdHigh</b><br/><br/>
-	 * <code>private double <b>pairEnergySumThresholdHigh</b></code><br/><br/>
-	 * Defines the threshold for the sum of the energies of a cluster
-	 * pair above which the pair will be rejected.
-	 */
-	private double pairEnergySumThresholdHigh = 1.8;
-	
-	/**
-	 * <b>rejectEdgeCrystals</b><br/><br/>
[truncated at 1000 lines; 1065 more skipped]

java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,34 +1,37 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-ecal-recon</artifactId>
     <name>ecal-recon</name>
-    <description>HPS ECAL reconstruction module</description>
-    
+    <description>ECAL reconstruction algorithms</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>
-    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-recon/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/ecal-recon/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/ecal-recon/</developerConnection>
     </scm>
-
     <dependencies>
         <dependency>
-            <groupId>org.lcsim</groupId>
-            <artifactId>lcsim-distribution</artifactId>
-            <version>${lcsimVersion}</version>
-        </dependency>
-        <dependency>
             <groupId>org.hps</groupId>
             <artifactId>hps-conditions</artifactId>
         </dependency>
     </dependencies>
-    
+    <build>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-surefire-plugin</artifactId>
+          <configuration>
+            <excludes>
+              <exclude>org/hps/recon/ECalClusterICTest.java</exclude>
+            </excludes>
+          </configuration>
+        </plugin>
+      </plugins>
+    </build>
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/src/main/java/org/hps/recon/ecal
EcalClusterIC.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/src/main/java/org/hps/recon/ecal/EcalClusterIC.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/src/main/java/org/hps/recon/ecal/EcalClusterIC.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,4 @@
 package org.hps.recon.ecal;
-
 import hep.physics.vec.BasicHep3Vector;
 import hep.physics.vec.Hep3Vector;
 import hep.physics.vec.VecOp;
@@ -16,11 +15,11 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.hps.recon.ecal.HPSEcalClusterIC;
 import org.lcsim.detector.IGeometryInfo;
 import org.lcsim.detector.solids.Trd;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.EventHeader;
-import org.lcsim.event.MCParticle;
 import org.lcsim.geometry.Detector;
 import org.lcsim.geometry.subdetector.HPSEcal3;
 import org.lcsim.geometry.subdetector.HPSEcal3.NeighborMap;
@@ -81,6 +80,37 @@
     double minTime = 0.0;
     // Maximum time cut window range. Units in ns.
     double timeWindow = 20.0;
+    // Variables for electron energy corrections
+    static final double ELECTRON_ENERGY_A = -0.0027;
+    static final double ELECTRON_ENERGY_B = -0.06;
+    static final double ELECTRON_ENERGY_C = 0.95;
+    // Variables for positron energy corrections
+    static final double POSITRON_ENERGY_A = -0.0096;
+    static final double POSITRON_ENERGY_B = -0.042;
+    static final double POSITRON_ENERGY_C = 0.94;
+    // Variables for photon energy corrections
+    static final double PHOTON_ENERGY_A = 0.0015;
+    static final double PHOTON_ENERGY_B = -0.047;
+    static final double PHOTON_ENERGY_C = 0.94;
+    // Variables for electron position corrections
+    static final double ELECTRON_POS_A = 0.0066;
+	static final double ELECTRON_POS_B = -0.03;
+	static final double ELECTRON_POS_C = 0.028;
+	static final double ELECTRON_POS_D = -0.45;
+	static final double ELECTRON_POS_E = 0.465;
+    // Variables for positron position corrections
+	static final double POSITRON_POS_A = 0.0072;
+	static final double POSITRON_POS_B = -0.031;
+	static final double POSITRON_POS_C = 0.007;
+	static final double POSITRON_POS_D = 0.342;
+	static final double POSITRON_POS_E = 0.108;
+    // Variables for photon position corrections
+	static final double PHOTON_POS_A = 0.005;
+	static final double PHOTON_POS_B = -0.032;
+	static final double PHOTON_POS_C = 0.011;
+	static final double PHOTON_POS_D = -0.037;
+	static final double PHOTON_POS_E = 0.294;
+	
     
        
     public void setClusterCollectionName(String clusterCollectionName) {
@@ -161,17 +191,9 @@
         this.timeWindow = timeWindow;
     }
     
-    // For storing MC particle list
-    public ArrayList<MCParticle> mcList = new ArrayList<MCParticle>();
-    
     // Make a map for quick calculation of the x-y position of crystal face
-    public Map<Point, Double[]> correctedPositionMap = new HashMap<Point, Double[]>();
+    public Map<Point, double[]> correctedPositionMap = new HashMap<Point, double[]>();
     
-    // MC particle list
-    public void addMCGen(MCParticle genMC){
-    	mcList.add(genMC);
-    }
-    
     public void startOfData() {
     	// Make sure that the calorimeter hit collection name is defined.
         if (ecalCollectionName == null) {
@@ -204,14 +226,6 @@
     public void process(EventHeader event) {
     	// Make sure the current event contains calorimeter hits.
         if (event.hasCollection(CalorimeterHit.class, ecalCollectionName)) {
-
-        	// Get generated hits
-            if (event.hasCollection(MCParticle.class, "MCParticle")) {
-                List<MCParticle> genPart = event.getMCParticles();
-                for(MCParticle m : genPart){
-                    mcList.add(m);
-                }        	
-            }
         	
             // Generate clusters from the calorimeter hits.
             //List<HPSEcalClusterIC> clusterList = null;
@@ -229,10 +243,10 @@
         	hitList.add(r);
         }
         
-        // Create a list to store the newly created clusters in.
+        //Create a list to store the newly created clusters in.
         ArrayList<HPSEcalClusterIC> clusterList = new ArrayList<HPSEcalClusterIC>();
         
-        // Create a list to store the rejected hits in.
+        //Create a list to store the rejected hits in.
         ArrayList<CalorimeterHit> rejectedHitList = new ArrayList<CalorimeterHit>();
         
         // Sort the list of hits by energy.
@@ -256,26 +270,23 @@
         	else { continue; }
         }
         
-    	// Create a map to connect the cell ID of a calorimeter crystal
-        // to the hit which occurred in that crystal.
+    	//Create a map to connect the cell ID of a calorimeter crystal to the hit which occurred in that crystal.
     	HashMap<Long, CalorimeterHit> hitMap = new HashMap<Long, CalorimeterHit>();
         for (CalorimeterHit hit : hitList) { hitMap.put(hit.getCellID(), hit); }
         
-        // Map a crystal to a list of all clusters in which it is a member.
+        //Map a crystal to a list of all clusters in which it is a member.
         Map<CalorimeterHit, List<CalorimeterHit>> commonHits = new HashMap<CalorimeterHit, List<CalorimeterHit>>();
-        
-        // Map a crystal to the seed of the cluster of which it is a member.
+
+        //Map a crystal to the seed of the cluster of which it is a member.
         HashMap<CalorimeterHit, CalorimeterHit> hitSeedMap = new HashMap<CalorimeterHit, CalorimeterHit>();
         
-      	// Set containing hits immediately around a seed hit.
-      	HashSet<CalorimeterHit> surrSeedSet = new HashSet<CalorimeterHit>();
-        
         // Loop through all calorimeter hits to locate seeds and perform
         // first pass calculations for component and common hits.
-        for (CalorimeterHit hit : hitList) {
+        for (int ii = 0; ii <= hitList.size() - 1; ii ++){
+        	CalorimeterHit hit = hitList.get(ii);
         	// Get the set of all neighboring crystals to the current hit.
             Set<Long> neighbors = neighborMap.get(hit.getCellID());
-            
+
             // Generate a list to store any neighboring hits in.
             ArrayList<CalorimeterHit> neighborHits = new ArrayList<CalorimeterHit>();
             
@@ -296,18 +307,24 @@
             // Loops through all the neighboring hits to determine if
             // the current hit is the local maximum within its set of
             // neighboring hits.
-            seedHitLoop:
-            for(CalorimeterHit neighbor : neighborHits) {
-            	if(!equalEnergies(hit, neighbor)) {
-            		isSeed = false;
-               		break seedHitLoop;
-            	}
-           
-            }
-            
+            	seedHitLoop:
+            		for(CalorimeterHit neighbor : neighborHits) {
+            			if(!equalEnergies(hit, neighbor)) {
+            				isSeed = false;
+            				break seedHitLoop;
+            			}
+            		}
             // If this hit is a seed hit, just map it to itself.
-            if (isSeed) { hitSeedMap.put(hit, hit); }
+            if (isSeed && hit.getCorrectedEnergy() >= seedEnergyThreshold) { hitSeedMap.put(hit, hit); }
             
+            // If this hit is a local maximum but does not pass seed threshold, 
+            // remove from hit list and do not cluster. 
+            else if (isSeed  && hit.getCorrectedEnergy() < seedEnergyThreshold){           	
+            	hitList.remove(ii);
+            	rejectedHitList.add(hit); 
+            	ii --;
+        	}
+            
             // If this hit is not a seed hit, see if it should be
             // attached to any neighboring seed hits.
             else {
@@ -343,7 +360,6 @@
                         // that it has been clustered.
                         else {
                           	hitSeedMap.put(hit, neighborHit);
-                        	surrSeedSet.add(hit);
                         }
                 	}
                 }
@@ -353,8 +369,7 @@
         // Performs second pass calculations for component hits.
         secondaryHitsLoop:
         for (CalorimeterHit secondaryHit : hitList) {
-        	// If the secondary hit is not associated with a seed, then
-        	// the rest of there is nothing further to be done.
+        	// Look for hits that already have an associated seed/clustering.
         	if(!hitSeedMap.containsKey(secondaryHit)) { continue secondaryHitsLoop; }
         	
         	// Get the secondary hit's neighboring crystals.
@@ -371,7 +386,7 @@
             	
             	// If the neighboring crystal exists and is not already
             	// in a cluster, add it to the list of neighboring hits.
-                if (secondaryNeighborHit != null && !hitSeedMap.containsKey(secondaryNeighborHit)) { //!clusteredHitSet.contains(secondaryNeighborHit)) {
+                if (secondaryNeighborHit != null && !hitSeedMap.containsKey(secondaryNeighborHit)) {
                 	secondaryNeighborHits.add(secondaryNeighborHit);
                 }
             }
@@ -381,30 +396,19 @@
             	// If the neighboring hit is of lower energy than the
             	// current secondary hit, then associate the neighboring
             	// hit with the current secondary hit's seed.
-            	
-            	//  if (secondaryNeighborHit.getCorrectedEnergy() < secondaryHit.getCorrectedEnergy()) {
-            	if(!equalEnergies(secondaryNeighborHit, secondaryHit)) {
-                	hitSeedMap.put(secondaryNeighborHit, hitSeedMap.get(secondaryHit));
-                }
+            	if(!equalEnergies(secondaryNeighborHit, secondaryHit)){
+            		hitSeedMap.put(secondaryNeighborHit, hitSeedMap.get(secondaryHit));}
             	else {continue;}
             }
         } // End component hits loop.
 
-        // This is a check to ensure ALL hits are either components or seeds. 
-        for (CalorimeterHit check : hitList){
-        	if(!hitSeedMap.containsKey(check)){
-        		System.out.println("Something is not clustered or component!");
-        		System.out.println("not clustered:"+"\t"+check.getIdentifierFieldValue("ix")+"\t"+
-        		check.getIdentifierFieldValue("iy")+"\t"+check.getCorrectedEnergy());
-        	}
-        }
-        
-                
+      
         // Performs second pass calculations for common hits.
         commonHitsLoop:
         for (CalorimeterHit clusteredHit : hitSeedMap.keySet()) {
+        	        	
         	// Seed hits are never common hits and can be skipped.
-        	if(hitSeedMap.get(clusteredHit) == clusteredHit || surrSeedSet.contains(clusteredHit)) { continue commonHitsLoop; }
+        	if(hitSeedMap.get(clusteredHit) == clusteredHit) { continue commonHitsLoop; }
         	
     		// Get the current clustered hit's neighboring crystals.
             Set<Long> clusteredNeighbors = neighborMap.get(clusteredHit.getCellID());
@@ -419,42 +423,45 @@
             	CalorimeterHit clusteredNeighborHit = hitMap.get(neighbor);
             	
             	// If it exists, add it to the neighboring hit list.
-                if (clusteredNeighborHit != null) {
+
+                if (clusteredNeighborHit != null && hitSeedMap.get(clusteredNeighborHit) != null) {         	
                 	clusteredNeighborHits.add(clusteredNeighborHit);
                 }
             }
             
             // Get the seed hit associated with this clustered hit.
             CalorimeterHit clusteredHitSeed = hitSeedMap.get(clusteredHit);
+
             
             // Loop over the clustered neighbor hits.
             for (CalorimeterHit clusteredNeighborHit : clusteredNeighborHits) {
             	// Check to make sure that the clustered neighbor hit
             	// is not already associated with the current clustered
-            	// hit's seed.
+            	// hit's seed.                    	
             	
-                if (hitSeedMap.get(clusteredNeighborHit) != clusteredHitSeed){
-
-                    //if (clusteredHit.getCorrectedEnergy() < clusteredNeighborHit.getCorrectedEnergy()) {
-                	if(!equalEnergies(clusteredHit, clusteredNeighborHit)){
-                	// Check and see if a list of common seeds
-                    	// for this hit already exists or not.
-                    	List<CalorimeterHit> commonHitList = commonHits.get(clusteredHit);
+                if ((hitSeedMap.get(clusteredNeighborHit) != clusteredHitSeed)){
+                	// Check for lowest energy hit and that comparison hit is not already common. 
+                	// If already common, this boundary is already accounted for. 
+                	if(!equalEnergies(clusteredHit, clusteredNeighborHit)
+                			&& !commonHits.containsKey(clusteredNeighborHit)){
+                		                		     		
+                			// Check and see if a list of common seeds
+                			// for this hit already exists or not.
+                			List<CalorimeterHit> commonHitList = commonHits.get(clusteredHit);
                     	
-                    	// If it does not, make a new one.
-                    	if(commonHitList == null) { commonHitList = new ArrayList<CalorimeterHit>(); }
+                			// If it does not, make a new one.                 	
+                			if(commonHitList == null) { commonHitList = new ArrayList<CalorimeterHit>();}
                     	
-                    	// Add the neighbors to the seeds to set of
-                    	// common seeds.
-                        commonHitList.add(clusteredHitSeed);
-                       	commonHitList.add(hitSeedMap.get(clusteredNeighborHit));
+                			// Add the neighbors to the seeds to set of
+                			// common seeds.
+                			commonHitList.add(clusteredHitSeed);                			
+                			commonHitList.add(hitSeedMap.get(clusteredNeighborHit));
                         
-                        // Put the common seed list back into the set.
-                        commonHits.put(clusteredHit, commonHitList);
-                    }
-                }
-                
-                
+                			// Put the common seed list back into the set.
+                			commonHits.put(clusteredHit, commonHitList); 
+                			
+                	}
+                }             
             }
         } // End common hits loop.
 
@@ -466,7 +473,7 @@
         
         
         /*
-         * All hits are sorted from above. The next part of the code is for calculating energies.
+         * All hits are sorted from above. The next part of the code is for calculating energies and positions.
          */
                 
         //Create map to contain the total energy of each cluster
@@ -483,11 +490,11 @@
         for (Map.Entry<CalorimeterHit, CalorimeterHit> entry : hitSeedMap.entrySet()) {
             CalorimeterHit eSeed = entry.getValue();
             double eEnergy = seedEnergy.get(eSeed);
-            eEnergy += entry.getKey().getRawEnergy();
+            eEnergy += entry.getKey().getCorrectedEnergy();
             seedEnergy.put(eSeed, eEnergy);
         }
 
-        // Create a map to contain final uncorrected cluster energies with common hit distributions.
+        // Create a map to contain final uncorrected cluster energies including common hit distributions.
         Map<CalorimeterHit, Double> seedEnergyTot = seedEnergy;
         
         //Distribute common hit energies with clusters
@@ -495,8 +502,8 @@
         	CalorimeterHit commonCell = entry1.getKey();
         	CalorimeterHit seedA = entry1.getValue().get(0);
         	CalorimeterHit seedB = entry1.getValue().get(1);    	
-        	double eFractionA = seedEnergy.get(seedA)/(seedEnergy.get(seedA)+seedEnergy.get(seedB));
-        	double eFractionB = seedEnergy.get(seedB)/(seedEnergy.get(seedA)+seedEnergy.get(seedB));
+        	double eFractionA = (seedEnergy.get(seedA))/((seedEnergy.get(seedA)+seedEnergy.get(seedB)));
+        	double eFractionB = (seedEnergy.get(seedB))/((seedEnergy.get(seedA)+seedEnergy.get(seedB)));
         	double currEnergyA = seedEnergyTot.get(seedA);
         	double currEnergyB = seedEnergyTot.get(seedB);
         	currEnergyA += eFractionA * commonCell.getCorrectedEnergy();
@@ -511,40 +518,29 @@
         Map<CalorimeterHit, Double> seedEnergyCorr = new HashMap<CalorimeterHit, Double>();
         
         // Energy Corrections as per HPS Note 2014-001
-        if (mcList.size() > 0) {
-            int pdg = mcList.get(0).getPDGID();
-
             // Iterate through known clusters with energies and apply correction.
             for (Map.Entry<CalorimeterHit, Double> entryC : seedEnergyTot.entrySet()) {
                 double rawEnergy = entryC.getValue();
-                if (pdg == 11) {// electron energy correction
-                    double corrEnergy = rawEnergy / (-0.0027 * rawEnergy - 0.06 / (Math.sqrt(rawEnergy)) + 0.95);
-                    seedEnergyCorr.put(entryC.getKey(), corrEnergy);
-                } else if (pdg == 22) {// photon energy correction
-                    double corrEnergy = rawEnergy / (0.0015 * rawEnergy - 0.047 / (Math.sqrt(rawEnergy)) + 0.94);
-                    seedEnergyCorr.put(entryC.getKey(), corrEnergy);
+                
+                // Energy correction for initial guess of electron:
+                int pdg = 11;
+                double corrEnergy = enCorrection(pdg, rawEnergy);
 
-                } else if (pdg == -11) {// positron energy correction
-                    double corrEnergy = rawEnergy / (-0.0096 * rawEnergy - 0.042 / (Math.sqrt(rawEnergy)) + 0.94);
-                    seedEnergyCorr.put(entryC.getKey(), corrEnergy);
-                } else {// some other particle, but I have no energy correction for this
-                    double corrEnergy = rawEnergy;
-                    seedEnergyCorr.put(entryC.getKey(), corrEnergy);
-                }
+                seedEnergyCorr.put(entryC.getKey(), corrEnergy);    
             }// end of energy corrections
-        }
         
-        
+                
         // Cluster Position as per HPS Note 2014-001
         // Create map with seed as key to position/centroid value
-        Map<CalorimeterHit, Double[]> seedPosition = new HashMap<CalorimeterHit, Double[]>();
+        Map<CalorimeterHit, double[]> rawSeedPosition = new HashMap<CalorimeterHit, double[]>();
+        Map<CalorimeterHit, double[]> corrSeedPosition = new HashMap<CalorimeterHit, double[]>();
         
         // top level iterates through seeds
         for (Map.Entry<CalorimeterHit, Double> entryS : seedEnergyTot.entrySet()) {
         	//get the seed for this iteration
            	CalorimeterHit seedP = entryS.getKey();
            	
-           	double xCl = 0.0; // calculated cluster x position
+           	double xCl = 0.0; // calculated cluster x position, prior to correction
             double yCl = 0.0; // calculated cluster y position
             double eNumX = 0.0; 
             double eNumY = 0.0;
@@ -562,7 +558,7 @@
         			Point hitIndex = new Point(ix, iy);
 
         			// Get the corrected position for this index pair.
-        			Double[] position = correctedPositionMap.get(hitIndex);
+        			double[] position = correctedPositionMap.get(hitIndex);
 
         			// If the result is null, it hasn't been calculated yet.
         			if(position == null) {
@@ -571,7 +567,7 @@
         				double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),(Hep3Vector)new BasicHep3Vector(0,0,-1*((Trd)geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
       
         				// Convert the result to  a Double[] array.
-        				position = new Double[3];
+        				position = new double[3];
         				position[0] = pos[0];
         				position[1] = pos[1];
         				position[2] = pos[2];
@@ -595,106 +591,89 @@
         	xCl = eNumX/eDen;
             yCl = eNumY/eDen;
             
-            Double[] corrPosition = new Double[2];
-            corrPosition[0] = xCl;
-            corrPosition[1] = yCl;
-            seedPosition.put(seedP, corrPosition);
-        		
+            double[] rawPosition = new double[3];
+            rawPosition[0] = xCl*10.0;//mm
+            rawPosition[1] = yCl*10.0;//mm
+            int ix = seedP.getIdentifierFieldValue("ix");
+			int iy = seedP.getIdentifierFieldValue("iy");
+			Point hitIndex = new Point(ix, iy);
+            rawPosition[2] = correctedPositionMap.get(hitIndex)[2];
+            
+            
+            
+            // Apply position correction factors:
+            // Position correction for electron:
+            int pdg = 11;
+            double xCorr = posCorrection(pdg, xCl*10.0, seedEnergyTot.get(seedP));
+           
+            double[] corrPosition = new double[3];
+            corrPosition[0] = xCorr*10.0;//mm
+            corrPosition[1] = yCl*10.0;//mm
+            corrPosition[2] = correctedPositionMap.get(hitIndex)[2];
+                        
+            corrSeedPosition.put(seedP, corrPosition);
+            rawSeedPosition.put(seedP, rawPosition);
+
         	
         }// end of cluster position calculation
 
-        
-              
-        
+                
         /*
-         * Prints the results in event display format. Not analyzed
-         * for efficiency, as this will ultimately not be a part of
-         * the driver and should be handled by the event display output
-         * driver instead. Contains output loops to collection.
+         * Outputs results to cluster collection. 
          */
-        // Only write to the output file is something actually exists.
+        // Only write output if something actually exists.
         if (hitMap.size() != 0) {
-        	// Increment the event number.
-        	eventNum++;
-        	
-        	// Write the event header.
-//        	writeHits.append(String.format("Event\t%d%n", eventNum));
-        	
-        	// Write the calorimeter hits that passed the energy cut.
-            for (CalorimeterHit n : hitList) {
-            	int hix = n.getIdentifierFieldValue("ix");
-            	int hiy = n.getIdentifierFieldValue("iy");
-            	double energy = n.getCorrectedEnergy();
-//            	writeHits.append(String.format("EcalHit\t%d\t%d\t%f%n", hix, hiy, energy));
-            }
-            
-            
+            // Loop over seeds
             for (Map.Entry<CalorimeterHit, CalorimeterHit> entry2 : hitSeedMap.entrySet()) {
                 if (entry2.getKey() == entry2.getValue()){
-                	if((entry2.getKey().getCorrectedEnergy()<seedEnergyThreshold)
-                		||(seedEnergyTot.get(entry2.getKey())<clusterEnergyThreshold)) 
+                	if(seedEnergyCorr.get(entry2.getKey())<clusterEnergyThreshold) 
                 	{	
-                		rejectedHitList.add(entry2.getKey());
+                		//Not clustered for not passing cuts
+                		rejectedHitList.add(entry2.getKey()); 
                 	}
                 	
                 	else{
-                	
-                		int six = entry2.getKey().getIdentifierFieldValue("ix");
-                		int siy = entry2.getKey().getIdentifierFieldValue("iy");
-//                		writeHits.append(String.format("Cluster\t%d\t%d\t%f%n", six, siy, energy));
-                	
+                		// New cluster
                 		HPSEcalClusterIC cluster = new HPSEcalClusterIC(entry2.getKey());
-                		cluster.addHit(entry2.getKey());
-                		
-                		//can't seem to get this to go into cluster information-------!!!!
- //                      	cluster.addPositionCorr(seedPosition.get(entry2.getKey()));
-                		if (seedEnergyCorr.values().size() > 0)
-                		    cluster.setEnergy(seedEnergyCorr.get(entry2.getKey()));
-
+                		clusterList.add(cluster);
+                		// Loop over hits belonging to seeds
                 		for (Map.Entry<CalorimeterHit, CalorimeterHit> entry3 : hitSeedMap.entrySet()) {
                 			if (entry3.getValue() == entry2.getValue()) {
                 				if(rejectedHitList.contains(entry2.getValue())){
                 					rejectedHitList.add(entry3.getKey());
                 				}
                 				else{
-                					int ix = entry3.getKey().getIdentifierFieldValue("ix");
-                					int iy = entry3.getKey().getIdentifierFieldValue("iy");
-//                       			writeHits.append(String.format("CompHit\t%d\t%d%n", ix, iy));
-                        	
+                					// Add hit to cluster
                 					cluster.addHit(entry3.getKey());
                 				}
                 			}
                 		}
                 		
                     for (Map.Entry<CalorimeterHit, List<CalorimeterHit>> entry4 : commonHits.entrySet()) {
-                        if (entry4.getValue().contains(entry2.getKey())) {
-                        	int ix = entry4.getKey().getIdentifierFieldValue("ix");
-                        	int iy = entry4.getKey().getIdentifierFieldValue("iy");
-//                        	writeHits.append(String.format("SharHit\t%d\t%d%n", ix, iy));
-                        	
-                        	// Added in shared hits for energy distribution between clusters, changed by HS 02JUN14
-//                            cluster.addHit(entry4.getKey());
-                            cluster.addSharedHit(entry4.getKey());
+                        if (entry4.getValue().contains(entry2.getKey())) {                       	
+                        	// Add shared hits for energy distribution between clusters
+                            cluster.addSharedHit(entry4.getKey()); 
                         }
                     }
-                    for(CalorimeterHit q : rejectedHitList)
-                    {// This does not output in correct event display format, just for de-bugging
-//                    	writeHits.append("Rejected"+q.getIdentifierFieldValue("ix")+"\t"+q.getIdentifierFieldValue("iy")+"\n");
-                    }
+                                        
+                    //Input both raw and corrected cluster energies
+            		if (seedEnergyCorr.values().size() > 0){
+            			cluster.setEnergy(seedEnergyCorr.get(entry2.getKey()));
+            			cluster.setRawEnergy(seedEnergyTot.get(entry2.getKey()));
+            			}
+
+            		//Input both uncorrected and corrected cluster positions. 
+            		cluster.setCorrPosition(corrSeedPosition.get(entry2.getKey()));
+            		cluster.setRawPosition(rawSeedPosition.get(entry2.getKey()));
+                  
                     
-                    
-                   	clusterList.add(cluster);
                 	}// End checking thresholds and write out.
-                }
-                
-            
+                }                            
             } //End cluster loop
-         // Write the event termination header.
-//            writeHits.append("EndEvent\n");
 //            System.out.println("Number of clusters: "+clusterList.size());    
 
             
-        } //End event display out loop.
+        } //End event output loop.
         int flag = 1 << LCIOConstants.CLBIT_HITS;
         event.put(clusterCollectionName, clusterList, HPSEcalClusterIC.class, flag);
         event.put(rejectedHitName, rejectedHitList, CalorimeterHit.class, flag);
@@ -712,49 +691,11 @@
         }
     }
     
-  /*  private static class EnergyComparator implements Comparator<CalorimeterHit> {
-        public int compare(CalorimeterHit o1, CalorimeterHit o2) {
-        	// If the energies are equivalent, the same, the two hits
-        	// are considered equivalent.
-        	if(o1.getCorrectedEnergy() == o2.getCorrectedEnergy()) { return 0; }
-        	
-        	// Higher energy hits are ranked higher than lower energy hits.
-        	else if(o1.getCorrectedEnergy() > o2.getCorrectedEnergy()) { return -1; }
-        	
-        	// Lower energy hits are ranked lower than higher energy hits.
-        	else { return 1; }
-        }
-    }*/
-    
-/*    // Also accounts for pathological case of cluster hits that are EXACTLY the same.
+ 
     private static class EnergyComparator implements Comparator<CalorimeterHit> {
-        public int compare(CalorimeterHit o1, CalorimeterHit o2) {
-        	// If the energies are equivalent, the same, the two hits
-        	// are considered equivalent.
-        	if(o1.getCorrectedEnergy() == o2.getCorrectedEnergy()) { 
-        		if(Math.abs(o1.getIdentifierFieldValue("iy")) < Math.abs(o2.getIdentifierFieldValue("iy"))){
-        			return -1;
-        		}
-        		else if((Math.abs(o1.getIdentifierFieldValue("iy")) == Math.abs(o2.getIdentifierFieldValue("iy")))
-        			&& (o1.getIdentifierFieldValue("ix") < o2.getIdentifierFieldValue("ix"))){
-        			return -1; }
-        		else if (Math.abs(o1.getIdentifierFieldValue("iy")) > Math.abs(o2.getIdentifierFieldValue("iy"))){
-        			return 1;
-        		}
-        		else{return 1;}
-        	}
-        	// Higher energy hits are ranked higher than lower energy hits.
-        	else if(o1.getCorrectedEnergy() > o2.getCorrectedEnergy()) { return -1; }
-        	
-        	// Lower energy hits are ranked lower than higher energy hits.
-        	else { return 1; }
-        }
-    }
-*/
-    private static class EnergyComparator implements Comparator<CalorimeterHit> {
     	/**
     	 * Compares the first hit with respect to the second. This
-    	 * method will compare hits first by energy, and the spatially.
+    	 * method will compare hits first by energy, and then spatially.
     	 * In the case of equal energy hits, the hit closest to the
     	 * beam gap and closest to the positron side of the detector
     	 * will be selected. If all of these conditions are true, the
@@ -815,7 +756,12 @@
      
     
 
-    // Handles pathological case where multiple neighboring crystals have EXACTLY the same energy.
+    /**
+     * Handles pathological case where multiple neighboring crystals have EXACTLY the same energy.
+     * @param hit Hit being considered as a seed
+     * @param neighbor Neighboring hit whose energy equals the hit's energy
+     * @return true if the hit is still a seed candidate after the tie-break
+     */
     private boolean equalEnergies(CalorimeterHit hit, CalorimeterHit neighbor){
     	boolean isSeed = true;
     	
@@ -836,7 +782,89 @@
     	}
     	return isSeed;	
     }
+    /**
+     * Calculates energy correction based on cluster raw energy and particle type as per 
+     * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+     * @param pdg Particle id as per PDG
+     * @param rawEnergy Raw Energy of the cluster (sum of hits with shared hit distribution)
+     * @return Corrected Energy
+     */    
+    public double enCorrection(int pdg, double rawEnergy){
+  	   if (pdg == 11) { // Particle is electron  		   
+  		   return energyCorrection(rawEnergy, ELECTRON_ENERGY_A, ELECTRON_ENERGY_B, ELECTRON_ENERGY_C);   
+  	   }
+  	   else if (pdg == -11) { //Particle is positron
+		   return energyCorrection(rawEnergy, POSITRON_ENERGY_A, POSITRON_ENERGY_B, POSITRON_ENERGY_C);   
+  	   }
+  	   else if (pdg == 22) { //Particle is photon
+		   return energyCorrection(rawEnergy, PHOTON_ENERGY_A, PHOTON_ENERGY_B, PHOTON_ENERGY_C);   
+  	   }
+  	   else { //Unknown 
+  		   double corrEnergy = rawEnergy;
+  		   return corrEnergy;}
+  	   
+     }   
     
+    /**
+     * Calculates the energy correction to a cluster given the variables from the fit as per
+     * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+     * @param rawEnergy Raw energy of the cluster
+     * @param varA Fit parameter A from the note
+     * @param varB Fit parameter B from the note
+     * @param varC Fit parameter C from the note
+     * @return Corrected Energy
+     */   
+    public double energyCorrection(double rawEnergy, double varA, double varB, double varC){
+    	double corrEnergy = rawEnergy / (varA * rawEnergy + varB / (Math.sqrt(rawEnergy)) + varC);
+    	return corrEnergy;
+    }
+       
     
+    /**
+     * Calculates position correction based on cluster raw energy, x calculated position, 
+     * and particle type as per 
+     * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+     * @param pdg Particle id as per PDG
+     * @param xPos Calculated x centroid position of the cluster, uncorrected, at face (mm)
+     * @param rawEnergy Raw energy of the cluster (sum of hits with shared hit distribution)
+     * @return Corrected x position
+     */
+    public double posCorrection(int pdg, double xPos, double rawEnergy){
+    	double xCl = xPos/10.0;//convert mm to cm (correction fit is in cm)
+    	if (pdg == 11) { //Particle is electron    	
+    		double xCorr = positionCorrection(xCl, rawEnergy, ELECTRON_POS_A, ELECTRON_POS_B, ELECTRON_POS_C, ELECTRON_POS_D, ELECTRON_POS_E);
+    		return xCorr*10.0;
+    	}
+    	else if (pdg == -11) {// Particle is positron   	
+    		double xCorr = positionCorrection(xCl, rawEnergy, POSITRON_POS_A, POSITRON_POS_B, POSITRON_POS_C, POSITRON_POS_D, POSITRON_POS_E);
+    		return xCorr*10.0;
+    	}
+    	else if (pdg == 22) {// Particle is photon  	
+    		double xCorr = positionCorrection(xCl, rawEnergy, PHOTON_POS_A, PHOTON_POS_B, PHOTON_POS_C, PHOTON_POS_D, PHOTON_POS_E);
+    		return xCorr*10.0;
+    	}
+    	else { //Unknown 
+    		double xCorr = xCl;
+    		return xCorr*10.0;}
+    	}
     
-}    
+    
+   /**
+    * Calculates the position correction in cm using the raw energy and variables associated with the fit
+    * of the particle as described in  
+    * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+    * @param xCl Uncorrected x centroid position of the cluster (cm)
+    * @param rawEnergy Raw energy of the cluster
+    * @param varA Fit parameter A from the note
+    * @param varB Fit parameter B from the note
+    * @param varC Fit parameter C from the note
+    * @param varD Fit parameter D from the note
+    * @param varE Fit parameter E from the note
+    * @return Corrected x position (cm)
+    */    
+    public double positionCorrection(double xCl, double rawEnergy, double varA, double varB, double varC, double varD, double varE){
+    	double xCorr = xCl-(varA/Math.sqrt(rawEnergy) + varB )*xCl-
+				(varC*rawEnergy + varD/Math.sqrt(rawEnergy) + varE);
+    	return xCorr;
+    }
+   
+    	
+ }    
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/src/main/java/org/hps/recon/ecal
HPSEcalClusterIC.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/src/main/java/org/hps/recon/ecal/HPSEcalClusterIC.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/ecal-recon/src/main/java/org/hps/recon/ecal/HPSEcalClusterIC.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,42 +1,66 @@
 package org.hps.recon.ecal;
 
-import hep.physics.vec.BasicHep3Vector;
-import hep.physics.vec.Hep3Vector;
-import hep.physics.vec.VecOp;
-
 import java.util.ArrayList;
 import java.util.List;
 
-import org.lcsim.detector.IGeometryInfo;
-import org.lcsim.detector.solids.Trd;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.base.BaseCluster;
 
 /**
- * Cluster with position defined by seed hit (for 1-bit trigger)
+ * Cluster extended to track shared hits and to hold the position
+ * as calculated by the full clustering code.
  *
  * @author Sho Uemura <[log in to unmask]>
  * @author Holly Szumila <[log in to unmask]>
  * 
- * @version $Id: HPSEcalCluster.java,v 1.11 2013/02/25 22:39:24 meeg Exp $
  */
 public class HPSEcalClusterIC extends BaseCluster {
 
     private CalorimeterHit seedHit = null;
     private long cellID;
     private ArrayList<CalorimeterHit> sharedHitList = new ArrayList<CalorimeterHit>(); 
+    private double[] rawPosition = new double[3];
+
     
     
-    
-    
     static final double eCriticalW = 800.0*ECalUtils.MeV/(74+1);
     static final double radLenW = 8.8; //mm
     double[] electronPosAtDepth = new double[3];
     private boolean needsElectronPosCalculation = true;
     double[] photonPosAtDepth = new double[3];
     private boolean needsPhotonPosCalculation = true;
-    double[] positionCorrection = new double[2];
     
+ // Variables for electron energy corrections
+    static final double ELECTRON_ENERGY_A = -0.0027;
+    static final double ELECTRON_ENERGY_B = -0.06;
+    static final double ELECTRON_ENERGY_C = 0.95;
+    // Variables for positron energy corrections
+    static final double POSITRON_ENERGY_A = -0.0096;
+    static final double POSITRON_ENERGY_B = -0.042;
+    static final double POSITRON_ENERGY_C = 0.94;
+    // Variables for photon energy corrections
+    static final double PHOTON_ENERGY_A = 0.0015;
+    static final double PHOTON_ENERGY_B = -0.047;
+    static final double PHOTON_ENERGY_C = 0.94;
+    // Variables for electron position corrections
+    static final double ELECTRON_POS_A = 0.0066;
+	static final double ELECTRON_POS_B = -0.03;
+	static final double ELECTRON_POS_C = 0.028;
+	static final double ELECTRON_POS_D = -0.45;
+	static final double ELECTRON_POS_E = 0.465;
+    // Variables for positron position corrections
+	static final double POSITRON_POS_A = 0.0072;
+	static final double POSITRON_POS_B = -0.031;
+	static final double POSITRON_POS_C = 0.007;
+	static final double POSITRON_POS_D = 0.342;
+	static final double POSITRON_POS_E = 0.108;
+    // Variables for photon position corrections
+	static final double PHOTON_POS_A = 0.005;
+	static final double PHOTON_POS_B = -0.032;
+	static final double PHOTON_POS_C = 0.011;
+	static final double PHOTON_POS_D = -0.037;
+	static final double PHOTON_POS_E = 0.294;
+    
     public HPSEcalClusterIC(Long cellID) {
         this.cellID = cellID;
     }
@@ -57,27 +81,136 @@
         }
         return seedHit;
     }
-    
+    /**
+     * Input shared hits between two clusters. 
+     */
     public void addSharedHit(CalorimeterHit sharedHit) {
     	sharedHitList.add(sharedHit);
     }
-    
-    
-    
+    /**
+     * Return shared hit list between two clusters. 
+     */
     public List<CalorimeterHit> getSharedHits() {
     	return sharedHitList;
+    }  
+    /**
+     * Inputs the uncorrected x,y,z position of the cluster.
+     */
+    public void setRawPosition(double[] Position) {
+    	rawPosition = Position;
+    }  
+    /**
+     * Returns the uncorrected x,y,z position of the cluster.
+     */
+    @Override
+    public double[] getPosition(){
+    	return this.rawPosition;
+    }   
+    /**
+     * Do an external calculation of the raw energy and set it. Includes shared hit distribution.
+     */
+    public void setRawEnergy(double rawEnergy){
+    	raw_energy = rawEnergy;
     }
+    /**
+     * Inputs the corrected position of the cluster, see HPS Note 2014-001.
+     */
+    public void setCorrPosition(double[] Position) {
+    	position = Position;
+    }    
+    /**
+     * Returns the corrected position of the cluster. 
+     */
+    public double[] getCorrPosition(){
+    	return this.position;
+    }
     
-    public void addPositionCorr(Double[] posCorr) {
-    	this.addPositionCorr(posCorr);
+    /**
+     * Calculates energy correction based on cluster raw energy and particle type as per 
+     * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+     * @param pdg Particle id as per PDG
+     * @param rawEnergy Raw Energy of the cluster (sum of hits with shared hit distribution)
+     * @return Corrected Energy
+     */    
+    public double enCorrection(int pdg, double rawEnergy){
+  	   if (pdg == 11) { // Particle is electron  		   
+  		   return energyCorrection(rawEnergy, ELECTRON_ENERGY_A, ELECTRON_ENERGY_B, ELECTRON_ENERGY_C);   
+  	   }
+  	   else if (pdg == -11) { //Particle is positron
+		   return energyCorrection(rawEnergy, POSITRON_ENERGY_A, POSITRON_ENERGY_B, POSITRON_ENERGY_C);   
+  	   }
+  	   else if (pdg == 22) { //Particle is photon
+		   return energyCorrection(rawEnergy, PHOTON_ENERGY_A, PHOTON_ENERGY_B, PHOTON_ENERGY_C);   
+  	   }
+  	   else { //Unknown 
+  		   double corrEnergy = rawEnergy;
+  		   return corrEnergy;}
+  	   
+     }   
+    
+    /**
+     * Calculates the energy correction to a cluster given the variables from the fit as per
+     * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+     * @param rawEnergy Raw energy of the cluster
+     * @param varA Fit parameter A from the note
+     * @param varB Fit parameter B from the note
+     * @param varC Fit parameter C from the note
+     * @return Corrected Energy
+     */   
+    public double energyCorrection(double rawEnergy, double varA, double varB, double varC){
+    	double corrEnergy = rawEnergy / (varA * rawEnergy + varB / (Math.sqrt(rawEnergy)) + varC);
+    	return corrEnergy;
     }
+       
     
+    /**
+     * Calculates position correction based on cluster raw energy, x calculated position, 
+     * and particle type as per 
+     * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+     * @param pdg Particle id as per PDG
+     * @param xPos Calculated x centroid position of the cluster, uncorrected, at face (mm)
+     * @param rawEnergy Raw energy of the cluster (sum of hits with shared hit distribution)
+     * @return Corrected x position
+     */
+    public double posCorrection(int pdg, double xPos, double rawEnergy){
+    	double xCl = xPos/10.0;//convert mm to cm (correction fit is in cm)
+    	if (pdg == 11) { //Particle is electron    	
+    		double xCorr = positionCorrection(xCl, rawEnergy, ELECTRON_POS_A, ELECTRON_POS_B, ELECTRON_POS_C, ELECTRON_POS_D, ELECTRON_POS_E);
+    		return xCorr*10.0;
+    	}
+    	else if (pdg == -11) {// Particle is positron   	
+    		double xCorr = positionCorrection(xCl, rawEnergy, POSITRON_POS_A, POSITRON_POS_B, POSITRON_POS_C, POSITRON_POS_D, POSITRON_POS_E);
+    		return xCorr*10.0;
+    	}
+    	else if (pdg == 22) {// Particle is photon  	
+    		double xCorr = positionCorrection(xCl, rawEnergy, PHOTON_POS_A, PHOTON_POS_B, PHOTON_POS_C, PHOTON_POS_D, PHOTON_POS_E);
+    		return xCorr*10.0;
+    	}
+    	else { //Unknown 
+    		double xCorr = xCl;
+    		return xCorr*10.0;}
+    	}
     
-//    public double[] getPosition() {
-//        return getSeedHit().getPosition();
-//    }
-//    
-    @Override
+    
+   /**
+    * Calculates the position correction in cm using the raw energy and variables associated with the fit
+    * of the particle as described in  
+    * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
+    * @param xCl Uncorrected x centroid position of the cluster (cm)
+    * @param rawEnergy Raw energy of the cluster
+    * @param varA Fit parameter A from the note
+    * @param varB Fit parameter B from the note
+    * @param varC Fit parameter C from the note
+    * @param varD Fit parameter D from the note
+    * @param varE Fit parameter E from the note
+    * @return Corrected x position (cm)
+    */    
+    public double positionCorrection(double xCl, double rawEnergy, double varA, double varB, double varC, double varD, double varE){
+    	double xCorr = xCl-(varA/Math.sqrt(rawEnergy) + varB )*xCl-
+				(varC*rawEnergy + varD/Math.sqrt(rawEnergy) + varE);
+    	return xCorr;
+    }
+    
+    
+ /*   @Override
     public double[] getPosition() {
         //Electron by default!?
         return this.getPositionAtShowerMax(true);
@@ -103,7 +236,8 @@
         double y = E/eCriticalW;
         double Cj = isElectron ? -0.5 : 0.5;
         double tmax = Math.log(y) + Cj; //Maximum of dE/dt profile in units of rad. len. 
-        double dmax = tmax*radLenW; //mm
+//        double dmax = tmax*radLenW; //mm
+        double dmax = 0.0; //Changed this to readout crystal centroid at face
         if(isElectron) {
             electronPosAtDepth =  calculatePositionAtDepth(dmax);
         } else {
@@ -175,7 +309,10 @@
             //Find position at shower max
             IGeometryInfo geom = hit.getDetectorElement().getGeometry();
             double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()),(Hep3Vector)new BasicHep3Vector(0,0,dmax-1*((Trd)geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
-                        
+
+
+            
+            
 //            System.out.println("global pos " + global_pos.toString());
 //            System.out.println("local pos " + local_pos.toString());
 //            System.out.println("local pos tmax " + local_pos_tmax.toString());
@@ -441,7 +578,7 @@
         return positionLocal;
     }
     
+  */  
     
     
-    
 }

java/branches/hps_java_trunk_HPSJAVA-255/evio
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/evio/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/evio/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -3,7 +3,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-evio</artifactId>
     <name>evio</name>
-    <description>HPS EVIO utilities package</description>
+    <description>EVIO utilities including EVIO to LCIO event builders</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>

java/branches/hps_java_trunk_HPSJAVA-255/integration-tests
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -2,7 +2,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-integration-tests</artifactId>
     <name>integration-tests</name>
-    <description>Integration test suite</description>
+    <description>integration test suite</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
@@ -27,14 +27,16 @@
                 <artifactId>maven-surefire-plugin</artifactId>
                 <configuration>
                     <excludes>
-                        <exclude>org/hps/TestRunEvioToLcioTest.java</exclude>
-                        <exclude>org/hps/MCFilteredReconTest.java</exclude>
+                        <exclude>org/hps/EtSystemTest.java</exclude>
                     </excludes>
+                    <redirectTestOutputToFile>true</redirectTestOutputToFile>
+                    <trimStackTrace>true</trimStackTrace>
                 </configuration>
             </plugin>
         </plugins>
     </build>
      <profiles>
+        <!-- This profile is for deactivating all integration tests when building everything from trunk. -->
         <profile>
             <id>no-integration-tests</id>
             <activation>
@@ -51,8 +53,35 @@
                     </plugin>
                 </plugins>
             </build>
-        </profile>  
+        </profile> 
         <profile>
+            <id>fast-integration-tests</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <configuration>
+                            <excludes>
+                                <exclude>org/hps/EcalReadoutSimTest.java</exclude>
+                                <exclude>org/hps/EtSystemTest.java</exclude>
+                                <exclude>org/hps/HPSTestRunTracker2014GeometryTrackReconTest.java</exclude>
+                                <exclude>org/hps/MCFilteredReconTest.java</exclude>
+                                <exclude>org/hps/MockDataReconTest.java</exclude>
+                                <exclude>org/hps/ReadoutNoPileupTest.java</exclude>
+                                <exclude>org/hps/ReconClusterICTest.java</exclude>
+                                <exclude>org/hps/TestRunReconTest.java</exclude>
+                            </excludes>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>                 
+        <!-- This profile activates automatically when not running tests on a SLAC Unix system with NFS access. -->
+        <profile>
             <id>no-slac-nfs</id>
             <activation>
                 <activeByDefault>false</activeByDefault>
@@ -66,8 +95,8 @@
                         <groupId>org.apache.maven.plugins</groupId>
                         <artifactId>maven-surefire-plugin</artifactId>
                         <configuration>
-                            <excludes>
-                                <exclude>org/hps/EcalReadoutSimTest.java</exclude>
+                            <excludes>                            
+                                <exclude>org/hps/EcalReadoutSimTest.java</exclude>                                
                             </excludes>
                         </configuration>
                     </plugin>

java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps
EcalReadoutSimTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/EcalReadoutSimTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/EcalReadoutSimTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -60,6 +60,7 @@
  * 
  * @author Jeremy McCormick <[log in to unmask]>
  */
+// FIXME: The input file for this test is too big and breaks the FileCache.  Use a smaller input file!
 public class EcalReadoutSimTest extends TestCase {
 
     // Expected values of event and collection object totals.
@@ -90,8 +91,8 @@
     static final String triggeredEventsResource = "/org/hps/test/EcalReadoutSimTest/triggered_events.txt";
     
     // File information.        
-    //static final String fileLocation = "ftp://ftp-hps.slac.stanford.edu/hps/hps_data/hps_java_test_case_data/EcalReadoutSimTest.slcio";
-    static final File inputFile = new File("/nfs/slac/g/hps/hps_data/hps_java_test_case_data/EcalReadoutSimTest.slcio");
+    //static final String fileLocation = "http://www.lcsim.org/test/hps-java/EcalReadoutSimTest.slcio";
+    static final File inputFile = new File("/nfs/slac/g/lcd/mc/prj/www/lcsim/test/hps-java/EcalReadoutSimTest.slcio");
     
     static final File outputDir = new File("./target/test-output/" + className);    
     static final File outputFile = new File(outputDir + File.separator + className);
@@ -136,7 +137,7 @@
     private void runEcalReadoutSim() throws Exception {
         
         //FileCache cache = new FileCache();
-        //File inputFile = cache.getCachedFile(new URL(fileLocation));
+        //File inputFile = cache.getCachedFile(new URL(fileLocation));        
                          
         outputDir.mkdirs();
         if (!outputDir.exists()) {

java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps
MCReconTest.java removed after 1243
--- java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/MCReconTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/MCReconTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,123 +0,0 @@
-package org.hps;
-
-import java.io.File;
-import java.net.URL;
-
-import junit.framework.TestCase;
-
-import org.lcsim.event.CalorimeterHit;
-import org.lcsim.event.Cluster;
-import org.lcsim.event.EventHeader;
-import org.lcsim.event.ReconstructedParticle;
-import org.lcsim.event.Track;
-import org.lcsim.event.TrackerHit;
-import org.lcsim.job.JobControlManager;
-import org.lcsim.util.Driver;
-import org.lcsim.util.cache.FileCache;
-import org.lcsim.util.test.TestUtil.TestOutputFile;
-
-/**
- * Run the reconstruction on output from the readout simulation.
- * 
- * @author Jeremy McCormick <[log in to unmask]>
- */
-// FIXME: Change to MCReadoutReconTest
-public class MCReconTest extends TestCase {
-    
-    File reconOutputFile = new TestOutputFile("recon");
-    
-    static final String fileLocation = "http://www.lcsim.org/test/hps-java/MCReconTest.slcio";
-    
-    /*
-    static final int TOTAL_CLUSTERS = 3960;        
-    static final int TOTAL_TRACKER_HITS = 28689;
-    static final int TOTAL_CALORIMETER_HITS = 61924;
-       
-    static final long TOTAL_RECON_EVENTS = 945;
-    static final int TOTAL_TRACKS = 2086;
-    static final int TOTAL_TRACKS_DELTA = 13;
-    static final int TOTAL_TRACKS_LOWER = TOTAL_TRACKS - TOTAL_TRACKS_DELTA;
-    static final int TOTAL_TRACKS_UPPER = TOTAL_TRACKS + TOTAL_TRACKS_DELTA;
-    
-    static final int TOTAL_RECONSTRUCTED_PARTICLES = 4321; 
-    static final int TOTAL_RECONSTRUCTED_PARTICLES_DELTA = 9;
-    static final int TOTAL_RECONSTRUCTED_PARTICLES_LOWER = TOTAL_RECONSTRUCTED_PARTICLES - TOTAL_RECONSTRUCTED_PARTICLES_DELTA;
-    static final int TOTAL_RECONSTRUCTED_PARTICLES_UPPER = TOTAL_RECONSTRUCTED_PARTICLES + TOTAL_RECONSTRUCTED_PARTICLES_DELTA;
-    */
-        
-    public void testMCRecon() throws Exception {
-        
-        FileCache cache = new FileCache();
-        File inputFile = cache.getCachedFile(new URL(fileLocation));
-        
-        System.out.println("Running MC recon on " + inputFile.getPath() + " ...");
-        JobControlManager job = new JobControlManager();
-        job.addVariableDefinition("outputFile", reconOutputFile.getPath());
-        job.addInputFile(inputFile);
-        job.setup("/org/hps/steering/recon/HPS2014OfflineTruthRecon.lcsim");
-        ReconCheckDriver reconCheckDriver = new ReconCheckDriver();
-        job.getLCSimLoop().add(reconCheckDriver);
-        long startMillis = System.currentTimeMillis();
-        job.run();
-        long elapsedMillis = System.currentTimeMillis() - startMillis;
-        long nevents = job.getLCSimLoop().getTotalSupplied();
-        System.out.println("MC recon processed " + job.getLCSimLoop().getTotalSupplied() + " events.");
-        System.out.print("MC recon took " + ((double)elapsedMillis/1000L) + " seconds");
-        System.out.println(" which is " + ((double)elapsedMillis / (double)nevents) + " ms per event.");
-        job.getLCSimLoop().dispose();
-                
-        //TestCase.assertEquals("Number of recon events processed was wrong.", TOTAL_RECON_EVENTS, nevents);     
-                                
-        //assertEquals("Wrong number of tracker hits.", TOTAL_TRACKER_HITS, reconCheckDriver.nTrackerHits);
-        //assertEquals("Wrong number of calorimeter hits.", TOTAL_CALORIMETER_HITS, reconCheckDriver.nCalorimeterHits);
-        //assertEquals("Wrong number of clusters.", TOTAL_CLUSTERS, reconCheckDriver.nClusters);
-        //TestCase.assertTrue("Number of tracks not within acceptable range.", 
-        //        (reconCheckDriver.nTracks >= TOTAL_TRACKS_LOWER && reconCheckDriver.nTracks <= TOTAL_TRACKS_UPPER));
-        //assertTrue("Number of reconstructed particles not within acceptable range.", 
-        //        (reconCheckDriver.nReconstructedParticles >= TOTAL_RECONSTRUCTED_PARTICLES_LOWER 
-        //        && reconCheckDriver.nReconstructedParticles <= TOTAL_RECONSTRUCTED_PARTICLES_UPPER));
-    }          
-    
-    static class ReconCheckDriver extends Driver {
-        
-        int nTracks;
-        int nClusters;        
-        int nTrackerHits;
-        int nCalorimeterHits;
-        int nReconstructedParticles;
-        int nEvents;
-        
-        public void process(EventHeader event) {
-            //System.out.println("ReconCheckDriver - event #" + event.getEventNumber());
-            ++nEvents;
-            if (event.hasCollection(Track.class, "MatchedTracks")) {
-                nTracks += event.get(Track.class, "MatchedTracks").size();
-                //System.out.println("  MatchedTracks: " + event.get(Track.class, "MatchedTracks").size());
-            }
-            if (event.hasCollection(Cluster.class, "EcalClusters")) {
-                nClusters += event.get(Cluster.class, "EcalClusters").size();
-                //System.out.println("  EcalClusters: " + event.get(Cluster.class, "EcalClusters").size());
-            }
-            if (event.hasCollection(TrackerHit.class, "RotatedHelicalTrackHits")) {
-                nTrackerHits += event.get(TrackerHit.class, "RotatedHelicalTrackHits").size();
-            }
-            if (event.hasCollection(CalorimeterHit.class, "EcalCalHits")) {
-                nCalorimeterHits += event.get(CalorimeterHit.class, "EcalCalHits").size();
-            }
-            if (event.hasCollection(ReconstructedParticle.class, "FinalStateParticles")) {
-                nReconstructedParticles += event.get(ReconstructedParticle.class, "FinalStateParticles").size();
-                //System.out.println("  FinalStateParticles: " + event.get(ReconstructedParticle.class, "FinalStateParticles").size());
-            }
-        }        
-        
-        public void endOfData() {
-            System.out.println("ReconCheckDriver results ...");
-            System.out.println("  nEvents: " + nEvents);
-            System.out.println("  nTracks: " + nTracks);
-            System.out.println("  nClusters: " + nClusters);
-            System.out.println("  nTrackerHits: " + nTrackerHits);
-            System.out.println("  nCalorimeterHits: " + nCalorimeterHits);
-            System.out.println("  nReconstructedParticles: " + nReconstructedParticles);
-        }
-    }              
-}

java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps
ReadoutNoPileupTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/ReadoutNoPileupTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/ReadoutNoPileupTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -25,7 +25,9 @@
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class ReadoutNoPileupTest extends TestCase {
-        
+    
+    static final int nEvents = 100;
+    
     public void testReadoutNoPileup() throws Exception {
         new TestOutputFile(this.getClass().getSimpleName()).mkdir();
         
@@ -37,6 +39,7 @@
         File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName() + "_readout");
         job.addVariableDefinition("outputFile", outputFile.getPath());
         job.setup("/org/hps/steering/readout/HPS2014ReadoutNoPileup.lcsim");
+        job.setNumberOfEvents(nEvents);
         job.run();
     }
 }

java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps
ReadoutToEvioTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/ReadoutToEvioTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/ReadoutToEvioTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -23,6 +23,8 @@
  */
 public class ReadoutToEvioTest extends TestCase {
     
+    static final int nEvents = 100;
+    
     public void testReadoutToEvio() throws Exception {
         new TestOutputFile(this.getClass().getSimpleName()).mkdir();
         
@@ -34,6 +36,7 @@
         File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName() + "_readout");
         job.addVariableDefinition("outputFile", outputFile.getPath());
         job.setup("/org/hps/steering/readout/HPS2014ReadoutToEvio.lcsim");
+        job.setNumberOfEvents(nEvents);
         job.run();
     }
 }

java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps
ReadoutToLcioTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/ReadoutToLcioTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/ReadoutToLcioTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -22,7 +22,9 @@
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class ReadoutToLcioTest extends TestCase {
-        
+    
+    static final int nEvents = 100;
+    
     public void testReadoutToLcio() throws Exception {
         
         new TestOutputFile(this.getClass().getSimpleName()).mkdir();
@@ -35,6 +37,7 @@
         File outputFile = new TestOutputFile(this.getClass().getSimpleName() + File.separator + this.getClass().getSimpleName());
         job.addVariableDefinition("outputFile", outputFile.getPath());
         job.setup("/org/hps/steering/readout/HPS2014ReadoutToLcio.lcsim");
+        job.setNumberOfEvents(nEvents);
         job.run();
     }
 

java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps
TestRunReadoutToEvioTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/TestRunReadoutToEvioTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/integration-tests/src/test/java/org/hps/TestRunReadoutToEvioTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -17,7 +17,9 @@
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class TestRunReadoutToEvioTest extends TestCase {
-        
+    
+    static final int nEvents = 100;
+    
     public void testTestRunReadoutToEvio() throws Exception {
         
         new TestOutputFile(this.getClass().getSimpleName()).mkdir();
@@ -31,6 +33,7 @@
         job.addVariableDefinition("outputFile", outputFile.getPath());
         job.addVariableDefinition("runNumber", "1351");
         job.setup("/org/hps/steering/readout/TestRunReadoutToEvio.lcsim");
+        job.setNumberOfEvents(nEvents);
         job.run();       
     }
 }

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -2,7 +2,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-monitoring-app</artifactId>
     <name>monitoring-app</name>
-    <description>HPS online monitoring application</description>
+    <description>online monitoring application</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui
JobSettingsPanel.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui/JobSettingsPanel.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui/JobSettingsPanel.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -357,54 +357,5 @@
                 steeringResourcesComboBox.setSelectedItem(value);
             }                                          
         }
-    }   
-    
-    /**
-     * Setup the event builder from the field setting.
-     * @return True if builder is setup successfully; false if not.
-     */
-    // FIXME: This method should throw an exception if an error occurs.
-    /*
-    void editEventBuilder() {
-        String eventBuilderClassName = eventBuilderField.getText();
-        boolean okay = true;
-        try {
-            // Test that the event builder can be created without throwing any exceptions.
-            Class<?> eventBuilderClass = Class.forName(eventBuilderClassName);
-            eventBuilderClass.newInstance();
-        } catch (Exception e) {
-            throw new RuntimeException("Error setting up event builder.", e);
-        }        
-        catch (ClassNotFoundException e) {
-            JOptionPane.showMessageDialog(this, "The event builder class does not exist.");
-            okay = false;
-        } 
-        catch (InstantiationException e) {
-            JOptionPane.showMessageDialog(this, "Failed to instantiate instance of event builder class.");
-            okay = false;
-        } 
-        catch (IllegalAccessException e) {
-            JOptionPane.showMessageDialog(this, "Couldn't access event builder class.");
-            okay = false;
-        }
-        
-        if (!okay)
-            resetEventBuilder();
-    }    
-
-    /**
-     * Reset the event builder to the default.
-     */
-    /*
-    // FIXME: Handle this with property change listener and use old value if new one is invalid.    
-    private void resetEventBuilder() {
-        SwingUtilities.invokeLater(new Runnable() {
-            public void run() {
-                eventBuilderField.setText(DEFAULT_EVENT_BUILDER_CLASS_NAME);
-            }
-        });
-    }
-    */
-
-    
+    }        
 }
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui
MonitoringApplication.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui/MonitoringApplication.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui/MonitoringApplication.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -23,7 +23,6 @@
 import static org.hps.monitoring.gui.Commands.VALIDATE_DATA_FILE;
 import static org.hps.monitoring.gui.model.ConfigurationModel.MONITORING_APPLICATION_LAYOUT_PROPERTY;
 import static org.hps.monitoring.gui.model.ConfigurationModel.SAVE_LAYOUT_PROPERTY;
-import static org.hps.monitoring.gui.model.ConfigurationModel.LOG_TO_FILE_PROPERTY;
 import hep.aida.jfree.plotter.PlotterRegion;
 import hep.aida.jfree.plotter.PlotterRegionListener;
 
@@ -86,6 +85,8 @@
 import org.hps.monitoring.subsys.SystemStatus;
 import org.hps.monitoring.subsys.SystemStatusListener;
 import org.hps.monitoring.subsys.SystemStatusRegistry;
+import org.hps.monitoring.subsys.et.EtSystemMonitor;
+import org.hps.monitoring.subsys.et.EtSystemStripCharts;
 import org.hps.record.composite.CompositeLoop;
 import org.hps.record.composite.CompositeLoopConfiguration;
 import org.hps.record.composite.EventProcessingThread;
@@ -321,7 +322,7 @@
     
     /**
      * Handle a property change event.
-     * @evt The property change event.
+     * @param evt The property change event.
      */
     @Override
     public void propertyChange(PropertyChangeEvent evt) {
@@ -408,11 +409,12 @@
      */
     private void setupUncaughtExceptionHandler() {
         Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {            
-            public void uncaughtException(Thread thread, Throwable exception) {
-               MonitoringApplication.this.errorHandler.setError(exception)
+            public void uncaughtException(Thread thread, Throwable exception) {                               
+                MonitoringApplication.this.errorHandler.setError(exception)
                    .log()
                    .printStackTrace()
                    .showErrorDialog();
+                // FIXME: This should probably cause a system.exit after the dialog box is closed!
             }
         });
     }
@@ -468,7 +470,6 @@
      */
     private void setupAida() {
         MonitoringAnalysisFactory.register();
-        MonitoringAnalysisFactory.configure();
         MonitoringPlotFactory.setRootPane(this.plotWindow.getPlotPane());
         MonitoringPlotFactory.setPlotterRegionListener(new PlotterRegionListener() {
             @Override
@@ -1078,7 +1079,7 @@
      */
     private void disconnect(ConnectionStatus status) {
 
-        log(Level.FINE, "Disconnecting from the ET server.");
+        log(Level.FINE, "Disconnecting the current session.");
 
         // Cleanup the ET connection.
         cleanupEtConnection();
@@ -1089,11 +1090,11 @@
         // Finally, change application state to fully disconnected.
         setConnectionStatus(ConnectionStatus.DISCONNECTED);
 
-        // Set the application status from the caller if an error had occurred.
+        // Set the application status from the caller if an error occurred.
         if (status == ConnectionStatus.ERROR)
             setConnectionStatus(status);
 
-        log(Level.INFO, "Disconnected from the ET server.");
+        log(Level.INFO, "Disconnected from the session.");
     }
 
     /**
@@ -1102,7 +1103,9 @@
     private void cleanupEtConnection() {
         if (connection != null) {     
             if (connection.getEtSystem().alive()) {
+                log(Level.FINEST, "Cleaning up the ET connection.");
                 connection.cleanup();
+                log(Level.FINEST, "Done cleaning up the ET connection.");
             }
             connection = null;
         }
@@ -1346,19 +1349,18 @@
         }        
 
         
-        // DEBUG: Turn these off while doing other stuff!!!!
-        
+        // DEBUG: Turn these off while doing other stuff!!!!        
         // Using ET server?
-        //if (usingEtServer()) {
+        if (usingEtServer()) {
 
             // ET system monitor.
             // FIXME: Make whether this is run or not configurable through the JobPanel.
-            //loopConfig.add(new EtSystemMonitor());
+            loopConfig.add(new EtSystemMonitor());
             
             // ET system strip charts.
             // FIXME: Make whether this is run or not configurable through the JobPanel.
-            //loopConfig.add(new EtSystemStripCharts());
-        //}
+            loopConfig.add(new EtSystemStripCharts());
+        }
               
         // RunPanel updater.
         loopConfig.add(runPanel.new RunModelUpdater());
@@ -1450,20 +1452,17 @@
         try {
             // Log message.
             logger.log(Level.FINER, "Stopping the session.");
+            
+            // Kill the watchdog thread which looks for disconnects, if it is active.
+            killSessionWatchdogThread();
                         
-            // Save AIDA file.
+            // Automatically write AIDA file from job settings.
             saveAidaFile();
         
-            // Disconnect from the ET system.
-            if (usingEtServer()) {
-                // Disconnect from the ET system.
-                disconnect();
-            } else { 
-                // When using direct file streaming, just need to toggle GUI state.
-                setDisconnectedGuiState();
-            }
+            // Disconnect from ET system, if using the ET server, and set the proper disconnected GUI state.           
+            disconnect();
             
-            // Terminate event processing.
+            // Stop the event processing; this is done after the ET system goes down to avoid hanging in calls to the ET system.
             stopEventProcessing();
                 
             logger.log(Level.INFO, "Session was stopped.");
@@ -1475,47 +1474,42 @@
     }
                                        
     /**
-     * Finish event processing and stop its thread, first killing the session watchdog 
-     * thread, if necessary.  The event processing thread may still be alive after 
-     * this method, e.g. if there is a call to <code>EtSystem.getEvents()</code> happening.
-     * In this case, event processing will exit later when the ET system goes down.
+     * Stop the event processing by executing a <code>STOP</code> command on the 
+     * record loop and killing the event processing thread.  This is executed
+     * after the ET system is disconnected so that the event processing does
+     * not potentially hang in a call to <code>EtSystem.getEvents()</code> forever.
      */
     private void stopEventProcessing() {
-            
-        // Is the event processing thread not null? 
+
+        // Is the event processing thread not null?
         if (processingThread != null) {
             
             // Is the event processing thread actually still alive?
             if (processingThread.isAlive()) {
-
-                // Interrupt and kill the event processing watchdog thread if necessary.
-                killSessionWatchdogThread();
-               
+                
                 // Request the event processing loop to execute stop.
-                loop.execute(Command.STOP);                
+                loop.execute(Command.STOP);
+                
+                try {
+                    // This should always work, because the ET system is disconnected before this.
+                    processingThread.join();
+                } catch (InterruptedException e) {
+                    // Don't know when this would ever happen.
+                    e.printStackTrace();                   
+                }
             }
 
-            // Wait for the event processing thread to finish.  This should just return
-            // immediately if it isn't alive so don't bother checking if alive is false.
-            try {
-                // In the case where ET is configured for sleep or timed wait, an untimed join could 
-                // block forever, so only wait for ~1 second before continuing.  The EventProcessingChain
-                // should still cleanup automatically when its thread completes after the ET system goes down.
-                processingThread.join(1000);
-            } catch (InterruptedException e) {
-                // Don't know when this would ever happen.
-            }
-       
             // Notify of last error that occurred in event processing.
             if (loop.getLastError() != null) {
                 errorHandler.setError(loop.getLastError()).log().printStackTrace();
             }
-       
-            // Reset event processing objects for next session.
-            loop.dispose();
-            loop = null;
+
+            // Set the event processing thread to null as it is unusable now.
             processingThread = null;
         }
+
+        // Set the loop to null as a new one will be created for next session.
+        loop = null;
     }
 
     /**
@@ -1532,7 +1526,8 @@
                     // This should always work once the thread is interrupted.
                     sessionWatchdogThread.join();
                 } catch (InterruptedException e) {
-                    // Should never happen.
+                    // This should never happen.
+                    e.printStackTrace();
                 }
             }
             // Set the thread object to null.
@@ -1559,7 +1554,8 @@
 
             } catch (InterruptedException e) {
                 // This probably just means that the disconnect button was pushed, and this thread should
-                // no longer wait on event processing to finish.
+                // no longer monitor the event processing.
+                e.printStackTrace();
             }
         }
     }
@@ -1767,4 +1763,4 @@
                 config.getWaitTime(), 
                 config.getChunkSize());
     }                      
-}
\ No newline at end of file
+}

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui
PlotInfoWindow.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui/PlotInfoWindow.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/gui/PlotInfoWindow.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -2,9 +2,12 @@
 
 import hep.aida.IAxis;
 import hep.aida.IBaseHistogram;
+import hep.aida.ICloud1D;
+import hep.aida.ICloud2D;
 import hep.aida.IDataPointSet;
 import hep.aida.IFunction;
 import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
 import hep.aida.jfree.plotter.ObjectStyle;
 import hep.aida.jfree.plotter.PlotterRegion;
 import hep.aida.ref.event.AIDAListener;
@@ -18,9 +21,11 @@
 import java.awt.event.ActionEvent;
 import java.awt.event.ActionListener;
 import java.util.EventObject;
+import java.util.List;
 import java.util.Timer;
 import java.util.TimerTask;
 
+import javax.swing.BorderFactory;
 import javax.swing.JComboBox;
 import javax.swing.JFrame;
 import javax.swing.JList;
@@ -32,12 +37,14 @@
 
 /**
  * <p>
- * Window for showing the statistics and other information about a plot.
+ * This is a GUI component for showing the statistics and other information about an AIDA plot.
  * <p>
- * This information will be dynamically updating using the <code>AIDAObserver</code> API
- * on the AIDA object.
+ * This information is updated dynamically via the <code>AIDAObserver</code> API on the AIDA object.
  */ 
-// FIXME: Add addRows for all types of AIDA objects (only Histogram1D implemented so far).
+// FIXME: Add addRows for all types of AIDA objects (only Histogram1D implemented so far). 
+// FIXME: Columns disappear when rebuilding table.
+// TODO: Add sorting of info table.
+// TODO: Probably this should be moved out of monitoring application as it is generically applicable to AIDA objects.
 public class PlotInfoWindow extends JFrame implements AIDAListener, ActionListener {
 
     JComboBox<Object> plotComboBox;
@@ -46,16 +53,23 @@
     JPanel contentPane = new JPanel();
     PlotterRegion currentRegion;
     Object currentObject;
-    static int INSET_SIZE = 5;
+    static final int INSET_SIZE = 5;
+    static final int BORDER_SIZE = 10;
 
     static final String[] COLUMN_NAMES = { "Field", "Value" };
 
     static final String PLOT_SELECTED = "PLOT_SELECTED";
+    
+    Timer timer = new Timer();
            
+    /**
+     * Class constructor, which will setup the GUI components.
+     */
     @SuppressWarnings("unchecked")
     PlotInfoWindow() {
         
         contentPane.setLayout(new GridBagLayout());
+        contentPane.setBorder(BorderFactory.createEmptyBorder(BORDER_SIZE, BORDER_SIZE, BORDER_SIZE, BORDER_SIZE));
         
         GridBagConstraints c;
         
@@ -75,62 +89,72 @@
                 return this;
             }
         });        
+        plotComboBox.addActionListener(this);
         c = new GridBagConstraints();
         c.gridx = 0;
         c.gridy = 0;
         c.fill = GridBagConstraints.HORIZONTAL;
-        c.insets = new Insets(INSET_SIZE, INSET_SIZE, INSET_SIZE, INSET_SIZE); 
+        c.insets = new Insets(0, 0, INSET_SIZE, 0);
         contentPane.add(plotComboBox, c);
 
         String data[][] = new String[0][0];
         model = new DefaultTableModel(data, COLUMN_NAMES);
         infoTable.setModel(model);
-        infoTable.getColumn("Field").setMinWidth(20);
+        
+        // FIXME: Are these adequate column size settings?  Could prob be bigger...
+        infoTable.getColumn("Field").setMinWidth(25);
         infoTable.getColumn("Value").setMinWidth(20);
+        
         c = new GridBagConstraints();
         c.gridx = 0;
         c.gridy = 1;
         c.fill = GridBagConstraints.BOTH;
-        c.insets = new Insets(0, 0, INSET_SIZE, 0);
         contentPane.add(infoTable, c);
         
         setContentPane(contentPane);        
         setAlwaysOnTop(true);
-        //this.setResizable(false);
+        setResizable(false);
         this.pack();
     }
     
     /**
-     * This method will be called when the backing AIDA object is updated,
-     * so the information in the table should be changed to reflect its new state.
+     * This method will be called when the backing AIDA object is updated and a 
+     * state change is fired via the <code>AIDAObservable</code> API.  The table
+     * is updated to reflect the new state of the object.
      * @param evt The EventObject pointing to the backing AIDA object.
      */
     @Override
-    public void stateChanged(final EventObject evt) {        
+    public void stateChanged(final EventObject evt) {
+        
+        // Make a timer task for running the update.
         TimerTask task = new TimerTask() {
             public void run() {
-                // Is this object connected to the correct AIDA observable?
+                // Is the state change from the current AIDAObservable?
                 if (evt.getSource() != PlotInfoWindow.this.currentObject) {
-                    // This should not ever happen but throw an error here just in case.
-                    throw new RuntimeException("The AIDAObservable is not attached to the right object!");
+                    // Assume this means that a different AIDAObservable was selected in the GUI.
+                    return;
                 }
                 
-                // Run the method to update the table with new plot information on the EDT.
+                // Update the table values on the Swing EDT.
                 runUpdateTable();
                 
-                // Set the observable to valid so we receive subsequent state changes.
+                // Set the observable to valid so subsequent state changes are received.
                 ((AIDAObservable) currentObject).setValid((AIDAListener) PlotInfoWindow.this);
             }
-        };
+        };        
         
         /* 
-         * Schedule the task to run in ~0.5 seconds.  If this is run immediately, somehow the
-         * observable state gets permanently set to invalid and we we will stop receiving any
-         * state changes! 
+         * Schedule the task to run in ~0.5 seconds.  If the Runnable runs immediately, somehow the
+         * observable state gets permanently set to invalid and additional state changes will not
+         * be received! 
          */
-        new Timer().schedule(task, 500);
+        timer.schedule(task, 500);
     }
 
+    /**
+     * Implementation of <code>actionPerformed</code> to handle the selection of
+     * a new object from the combo box. 
+     */
     @Override
     public void actionPerformed(ActionEvent e) {
         // Was a new item selected in the combo box?
@@ -142,6 +166,13 @@
         }
     }        
     
+    /**
+     * Get the title of an AIDA object.  Unfortunately there is
+     * no base type with this information.
+     * @param object The AIDA object.
+     * @return The title of the object from its title method,
+     *          or the value of its toString method if it has no title.
+     */
     String getObjectTitle(Object object) {
         if (object instanceof IBaseHistogram) {
             return ((IBaseHistogram)object).title();
@@ -154,7 +185,11 @@
         }
     }
 
-    void setCurrentRegion(PlotterRegion region) {        
+    /**
+     * Set the current plotter region, which will rebuild the GUI accordingly.
+     * @param region The current plotter region.
+     */
+    synchronized void setCurrentRegion(PlotterRegion region) {        
         if (region != currentRegion) {            
             currentRegion = region;
             if (currentRegion.title() != null)
@@ -165,15 +200,14 @@
         }
     }
 
-    void setupContentPane() {           
-        
+    /**
+     * Configure the frame's content panel from current component settings.
+     */
+    void setupContentPane() {                   
         plotComboBox.setSize(plotComboBox.getPreferredSize());
         infoTable.setSize(infoTable.getPreferredSize());
-        int width = plotComboBox.getPreferredSize().width + INSET_SIZE * 2;
-        int height = plotComboBox.getPreferredSize().height + infoTable.getPreferredSize().height + INSET_SIZE * 3;
-        //System.out.println("contentPane");
-        //System.out.println("  w: " + width);
-        //System.out.println("  h: " + height);
+        int width = plotComboBox.getPreferredSize().width;
+        int height = plotComboBox.getPreferredSize().height + INSET_SIZE + infoTable.getPreferredSize().height;
         contentPane.setPreferredSize(
                 new Dimension(
                         width,
@@ -185,14 +219,28 @@
         setVisible(true);      
     }
         
+    /**
+     * Update the info table from the state of the current AIDA object.
+     */
     void updateTable() {
         model.setRowCount(0);
         model.setColumnIdentifiers(COLUMN_NAMES);                
         if (currentObject instanceof IHistogram1D) {            
             addRows((IHistogram1D)currentObject);
+        } else if (currentObject instanceof IHistogram2D) {
+            addRows((IHistogram2D)currentObject);
+        } else if (currentObject instanceof ICloud2D) {
+            addRows((ICloud2D)currentObject);
+        } else if (currentObject instanceof ICloud1D) {
+            if (((ICloud1D)currentObject).isConverted()) {
+                addRows(((ICloud1D)currentObject).histogram());
+            }
         }
     }
     
+    /**
+     * Run the {@link #updateTable()} method on the Swing EDT.
+     */
     void runUpdateTable() {
         SwingUtilities.invokeLater(new Runnable() { 
             public void run() {
@@ -201,16 +249,33 @@
         });
     }
     
+    /**
+     * Update the combo box contents with the plots from the current region.
+     */
     void updateComboBox() {
         plotComboBox.removeAllItems();
-        for (ObjectStyle objectStyle : currentRegion.getObjectStyles()) {
-            Object object = objectStyle.object();
-            if (object instanceof IBaseHistogram) {
+        List<Object> objects = currentRegion.getPlottedObjects();        
+        for (Object object : objects) {
+            if (isValidObject(object)) {
                 this.plotComboBox.addItem(object);
             }
         }        
     }
+    
+    boolean isValidObject(Object object) {
+        if (object == null)
+            return false;
+        if (object instanceof IBaseHistogram || object instanceof IFunction || object instanceof IDataPointSet) {
+            return true;
+        } else {
+            return false;
+        }
+    }
 
+    /**
+     * Add rows to the info table from the state of a 1D histogram.
+     * @param histogram The AIDA object.
+     */
     void addRows(IHistogram1D histogram) {
         addRow("title", histogram.title());
         addRow("bins", histogram.axis().bins());
@@ -222,30 +287,98 @@
         addRow("overflow entries", histogram.binEntries(IAxis.OVERFLOW_BIN));
         addRow("underflow entries", histogram.binEntries(IAxis.UNDERFLOW_BIN));
     }
+    
+    /**
+     * Add rows to the info table from the state of a 2D histogram.
+     * @param histogram The AIDA object.
+     */
+    void addRows(IHistogram2D histogram) {
+        addRow("title", histogram.title());
+        addRow("x bins", histogram.xAxis().bins());
+        addRow("y bins", histogram.yAxis().bins());
+        addRow("entries", histogram.entries());
+        addRow("x mean", String.format("%.10f%n", histogram.meanX()));
+        addRow("y mean", String.format("%.10f%n", histogram.meanY()));
+        addRow("x rms", String.format("%.10f%n", histogram.rmsX()));
+        addRow("y rms", String.format("%.10f%n", histogram.rmsY()));
+        addRow("sum bin heights", histogram.sumBinHeights());
+        addRow("max bin height", histogram.maxBinHeight());
+        addRow("x overflow entries", histogram.binEntriesX(IAxis.OVERFLOW_BIN));
+        addRow("y overflow entries", histogram.binEntriesY(IAxis.OVERFLOW_BIN));
+        addRow("x underflow entries", histogram.binEntriesX(IAxis.UNDERFLOW_BIN));
+        addRow("y underflow entries", histogram.binEntriesY(IAxis.UNDERFLOW_BIN));
+    }
+    
+    /**
+     * Add rows to the info table from the state of a 2D cloud.
+     * @param cloud The AIDA object.
+     */
+    void addRows(ICloud2D cloud) {        
+        addRow("title", cloud.title());
+        addRow("entries", cloud.entries());
+        addRow("max entries", cloud.maxEntries());
+        addRow("x lower edge", cloud.lowerEdgeX());
+        addRow("x upper edge", cloud.upperEdgeX());
+        addRow("y lower edge", cloud.lowerEdgeY());        
+        addRow("y upper edge", cloud.upperEdgeY());
+        addRow("x mean", String.format("%.10f%n", cloud.meanX()));
+        addRow("y mean", String.format("%.10f%n", cloud.meanY()));
+        addRow("x rms", String.format("%.10f%n", cloud.rmsX()));
+        addRow("y rms", String.format("%.10f%n", cloud.rmsY()));
+    }
 
+    /**
+     * Add a row to the info table.
+     * @param field The field name.
+     * @param value The field value.
+     */
     void addRow(String field, Object value) {
         model.insertRow(infoTable.getRowCount(), new Object[] { field, value });
     }
     
-    void setCurrentObject(Object object) { 
+    /**
+     * Set the current AIDA object that backs this GUI, i.e. an IHistogram1D etc.
+     * @param object The backing AIDA object.
+     */
+    synchronized void setCurrentObject(Object object) {
+                        
         if (object == null)
             throw new IllegalArgumentException("The object arg is null!");       
+
         if (object == currentObject)
-            return;        
+            return;
+       
+        // Remove the AIDAListener from the previous object.
         removeListener();
+                              
+        // Set the current object reference.
         currentObject = object;        
+        
+        // Update the table immediately with information from the current object.
+        // We need to wait for this the first time, so we know the preferred size 
+        // of the table GUI component when resizing the content pane.
         updateTable();
+        
+        // Add an AIDAListener to the AIDA object via the AIDAObservable API.
         addListener();
     }
     
+    /**
+     * Remove this object as an <code>AIDAListener</code> on the current <code>AIDAObservable</code>.
+     */
     void removeListener() {
         if (currentObject != null) {
+            // Remove this object as a listener on the current observable.
             ((AIDAObservable)currentObject).removeListener(this);
         }
     }
     
+    /**
+     * Add this object as an <code>AIDAListener</code> on the current <code>AIDAObservable</code>.
+     */
     void addListener() {        
         if (currentObject instanceof AIDAObservable) {
+            // Setup a listener on the current AIDA object.
             AIDAObservable observable = (AIDAObservable)currentObject;
             observable.addListener(this);
             observable.setValid(this);

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/plotting
MonitoringAnalysisFactory.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/plotting/MonitoringAnalysisFactory.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/plotting/MonitoringAnalysisFactory.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,10 +1,6 @@
 package org.hps.monitoring.plotting;
 
-import org.jfree.chart.ChartFactory;
-import org.jfree.chart.renderer.xy.XYBarRenderer;
-
 import hep.aida.IPlotterFactory;
-import hep.aida.jfree.chart.DefaultChartTheme;
 import hep.aida.ref.AnalysisFactory;
 
 /**
@@ -28,14 +24,6 @@
     }
     
     /**
-     * Do some JFreeChart related configuration.
-     */
-    public static void configure() {
-        ChartFactory.setChartTheme(new DefaultChartTheme());
-        XYBarRenderer.setDefaultShadowsVisible(false);
-    }
-
-    /**
      * Create a named plotter factory for the monitoring application.
      */
     public IPlotterFactory createPlotterFactory(String name) {

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/plotting
MonitoringPlotFactory.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/plotting/MonitoringPlotFactory.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-app/src/main/java/org/hps/monitoring/plotting/MonitoringPlotFactory.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -44,7 +44,7 @@
      */
     MonitoringPlotFactory() {
         super();        
-        setEmbedded(true);
+        setIsEmbedded(true);
         setupRootPane("  ");
         if (regionListener != null)
             addPlotterRegionListener(regionListener);
@@ -57,7 +57,7 @@
     MonitoringPlotFactory(String name) {
         super();
         this.name = name;
-        setEmbedded(true);
+        setIsEmbedded(true);
         setupRootPane(name);
         if (regionListener != null)
             addPlotterRegionListener(regionListener);

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-drivers
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-drivers/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-drivers/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -2,7 +2,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-monitoring-drivers</artifactId>
     <name>monitoring-drivers</name>
-    <description>Drivers for the MonitoringApplication</description>
+    <description>org.lcsim Drivers for using in the monitoring application</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>

java/branches/hps_java_trunk_HPSJAVA-255/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/example
SimplePlotDriver.java removed after 1243
--- java/branches/hps_java_trunk_HPSJAVA-255/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/example/SimplePlotDriver.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/monitoring-drivers/src/main/java/org/hps/monitoring/drivers/example/SimplePlotDriver.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,46 +0,0 @@
-package org.hps.monitoring.drivers.example;
-
-import hep.aida.IHistogram1D;
-import hep.aida.IPlotter;
-import hep.aida.IPlotterFactory;
-
-import org.lcsim.event.EventHeader;
-import org.lcsim.event.RawCalorimeterHit;
-import org.lcsim.event.RawTrackerHit;
-import org.lcsim.util.Driver;
-import org.lcsim.util.aida.AIDA;
-
-public class SimplePlotDriver extends Driver {
-    
-    static String ecalCollectionName = "EcalReadoutHits";
-    static String svtCollectionName = "SVTRawTrackerHits";
-    
-    AIDA aida = AIDA.defaultInstance();
-    IHistogram1D svtHitsPlot;
-    IHistogram1D ecalHitsPlot;
-    IHistogram1D ecalEnergyPlot;
-    
-    public void startOfData() {
-        ecalHitsPlot = aida.histogram1D("ECAL Hits per Event", 20, 0., 20.);
-        svtHitsPlot = aida.histogram1D("SVT Hits per Event", 200, 0., 200.);
-        
-        IPlotterFactory plotterFactory = aida.analysisFactory().createPlotterFactory("Monitoring Test Plots");
-        
-        IPlotter plotter = plotterFactory.create("ECAL");
-        plotter.createRegion();
-        plotter.region(0).plot(ecalHitsPlot);
-        plotter.show();
-        
-        plotter = plotterFactory.create("SVT");
-        plotter.createRegion();
-        plotter.region(0).plot(svtHitsPlot);
-        plotter.show();
-    }
-    
-    public void process(EventHeader event) {
-        if (event.hasCollection(RawTrackerHit.class, svtCollectionName))            
-            svtHitsPlot.fill(event.get(RawTrackerHit.class, svtCollectionName).size());
-        if (event.hasCollection(RawCalorimeterHit.class,  ecalCollectionName))
-            ecalHitsPlot.fill(event.get(RawCalorimeterHit.class, ecalCollectionName).size());
-    }
-}
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-255/parent
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/parent/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/parent/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -8,10 +8,12 @@
     <packaging>pom</packaging>
     <version>3.0.3-SNAPSHOT</version>
     <name>parent</name>
+    <description>HPS Java parent POM</description>
     <properties>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <org.lcsim.cacheDir>${user.home}</org.lcsim.cacheDir>
         <lcsimVersion>3.0.5-SNAPSHOT</lcsimVersion>
+        <skipSite>false</skipSite>
     </properties>
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/parent/</url>
@@ -70,6 +72,11 @@
             <artifactId>junit</artifactId>
             <version>4.11</version>
         </dependency>
+        <dependency>
+            <groupId>org.lcsim</groupId>
+            <artifactId>lcsim-distribution</artifactId>
+            <version>${lcsimVersion}</version>
+        </dependency>
     </dependencies>
     <!-- DO NOT EDIT THESE DEPENDENCY VERSIONS MANUALLY. -->
     <dependencyManagement>
@@ -218,6 +225,8 @@
                     <version>3.3</version>
                     <configuration>
                         <chmod>false</chmod>
+                        <skip>${skipSite}</skip>
+                        <skipDeploy>${skipSite}</skipDeploy>
                     </configuration>
                 </plugin>
                 <plugin>

java/branches/hps_java_trunk_HPSJAVA-255/plugin
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/plugin/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/plugin/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -2,7 +2,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-plugin</artifactId>
     <name>plugin</name>
-    <description>HPS JAS3 Plugin</description>
+    <description>JAS3 Plugin</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
@@ -59,7 +59,7 @@
                         <include>org.hps:hps-record-util</include>
                         <include>org.hps:hps-tracking</include>
                         <include>org.hps:hps-users</include>
-                        <include>org.hps:hps-util</include>                                                                                                                                              
+                        <include>org.hps:hps-util</include>
                         <include>mysql:mysql-connector-java</include>
                         <include>org.jlab.coda:jevio</include>
                         <include>org.jlab.coda:et</include>                        
@@ -124,6 +124,20 @@
                     </plugin>
                 </plugins>
             </build>
-        </profile>
+        </profile>        
+        <profile>
+            <id>test-release</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.freehep</groupId>
+                        <artifactId>freehep-jas-plugin</artifactId>
+                        <configuration>
+                            <skip>true</skip>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>        
     </profiles>
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/plugin/src/main/java/org/hps/plugin
HPSPlugin.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/plugin/src/main/java/org/hps/plugin/HPSPlugin.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/plugin/src/main/java/org/hps/plugin/HPSPlugin.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -3,7 +3,7 @@
 import org.freehep.application.studio.Plugin;
 
 /**
- *
+ * This is just a dummy plugin class to make JAS3 happy.
  * @author Jeremy McCormick
  * @version $Id: HPSPlugin.java,v 1.1 2013/06/03 16:23:47 jeremy Exp $
  */

java/branches/hps_java_trunk_HPSJAVA-255
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,4 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-
     <modelVersion>4.0.0</modelVersion>
     <groupId>org.hps</groupId>
     <artifactId>hps-modules</artifactId>
@@ -7,27 +6,23 @@
     <name>HPS Java Project</name>
     <description>HPS module build</description>
     <url>http://www.lcsim.org/sites/hps/</url>
-
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>   
-
     <scm>
         <url>svn://svn.freehep.org/hps/java/trunk/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/</developerConnection>
     </scm>
-
     <distributionManagement>
         <site>
             <id>lcsim-site</id>
             <url>dav:http://srs.slac.stanford.edu/nexus/content/sites/lcsim-site/hps/</url>
         </site>
     </distributionManagement>
-
     <build>
         <pluginManagement>
             <plugins>
@@ -46,7 +41,6 @@
             </plugins>
         </pluginManagement>
     </build>
-
     <reporting>
         <plugins>
             <plugin>
@@ -122,52 +116,26 @@
             </plugin>
         </plugins>
     </reporting>
-    
-    <profiles>
-        <profile>
-            <id>default</id>
-            <activation>
-                <activeByDefault>true</activeByDefault>
-            </activation>
-            <modules>
-                <module>parent</module>
-                <module>util</module>
-                <module>detector-data</module>
-                <module>conditions</module>
-                <module>ecal-readout-sim</module>
-                <module>ecal-recon</module>
-                <module>evio</module>
-                <module>recon</module>
-                <module>tracking</module>
-                <module>ecal-event-display</module>
-                <module>monitoring-drivers</module>
-                <module>record-util</module>
-                <module>monitoring-app</module>
-                <module>analysis</module>
-                <module>users</module>
-                <module>steering-files</module>
-                <module>distribution</module>
-                <module>plugin</module>
-                <module>integration-tests</module>
-            </modules>
-        </profile>
-        <profile>
-            <id>site</id>
-            <modules>
-                <module>analysis</module>
-                <module>conditions</module>
-                <module>ecal-readout-sim</module>
-                <module>ecal-recon</module>
-                <module>evio</module>
-                <module>monitoring-app</module>
-                <module>monitoring-drivers</module>
-                <module>plugin</module>                                                                              
-                <module>recon</module>
-                <module>tracking</module>
-                <module>users</module>                
-                <module>util</module>
-            </modules>
-        </profile>
-    </profiles>
-
+    <modules>
+        <module>analysis</module>
+        <module>conditions</module>
+        <module>datacat</module>
+        <module>detector-data</module>
+        <module>distribution</module>
+        <module>ecal-event-display</module>
+        <module>ecal-readout-sim</module>
+        <module>ecal-recon</module>
+        <module>evio</module>
+        <module>integration-tests</module>
+        <module>monitoring-app</module>
+        <module>monitoring-drivers</module>
+        <module>parent</module>
+        <module>plugin</module>
+        <module>recon</module>
+        <module>record-util</module>
+        <module>steering-files</module>
+        <module>tracking</module>
+        <module>users</module>
+        <module>util</module>
+    </modules>
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/recon
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/recon/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/recon/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -3,7 +3,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-recon</artifactId>
     <name>recon</name>
-    <description>HPS recon code</description>
+    <description>reconstruction algorithms</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>

java/branches/hps_java_trunk_HPSJAVA-255/recon/src/main/java/org/hps/recon/particle
HpsReconParticleDriver.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/recon/src/main/java/org/hps/recon/particle/HpsReconParticleDriver.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/recon/src/main/java/org/hps/recon/particle/HpsReconParticleDriver.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -22,7 +22,7 @@
 
 
 /**
- * 
+ * The main HPS implementation of ReconParticleDriver...makes V0 candidates and does vertex fits
  * @author Omar Moreno <[log in to unmask]>
  * @version $Id$
  */
@@ -39,12 +39,12 @@
 	@Override
 	protected void startOfData(){
 		
-		unconstrainedV0CandidatesColName    = "UnconstrainedV0Candidates";
-		beamConV0CandidatesColName   		= "BeamspotConstrainedV0Candidates";
-		targetConV0CandidatesColName 		= "TargetConstrainedV0Candidates";	
-		unconstrainedV0VerticesColName 		= "UnconstrainedV0Vertices";
-		beamConV0VerticesColName 			= "BeamspotConstrainedV0Vertices";
-		targetConV0VerticesColName			= "TargetConstrainedV0Vertices";
+//		unconstrainedV0CandidatesColName    = "UnconstrainedV0Candidates";
+//		beamConV0CandidatesColName   		= "BeamspotConstrainedV0Candidates";
+//		targetConV0CandidatesColName 		= "TargetConstrainedV0Candidates";	
+//		unconstrainedV0VerticesColName 		= "UnconstrainedV0Vertices";
+//		beamConV0VerticesColName 			= "BeamspotConstrainedV0Vertices";
+//		targetConV0VerticesColName			= "TargetConstrainedV0Vertices";
 	}
 
 	/**

java/branches/hps_java_trunk_HPSJAVA-255/recon/src/main/java/org/hps/recon/particle
ReconParticleDriver.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -41,22 +41,28 @@
     List<ReconstructedParticle> electrons;
     List<ReconstructedParticle> positrons;
     List<Vertex> unconstrainedV0Vertices;
-    List<Vertex> beamConV0Vertices; 
+    List<Vertex> beamConV0Vertices;
     List<Vertex> targetConV0Vertices;
 
     // Collections
     String ecalClustersCollectionName = "EcalClusters";
     String tracksCollectionName = "MatchedTracks";
     String finalStateParticlesColName = "FinalStateParticles";
-    String unconstrainedV0CandidatesColName = null;
-    String beamConV0CandidatesColName = null;
-    String targetConV0CandidatesColName = null;
-    String vertexCandidatesColName = null;
-    String vertexBeamConsCandidatesName = null;
-	String unconstrainedV0VerticesColName = null;
-	String beamConV0VerticesColName = null;
-	String targetConV0VerticesColName = null;
-	
+    String unconstrainedV0CandidatesColName = "UnconstrainedV0Candidates";
+    String beamConV0CandidatesColName = "BeamspotConstrainedV0Candidates";
+    String targetConV0CandidatesColName = "TargetConstrainedV0Candidates";
+    String unconstrainedV0VerticesColName = "UnconstrainedV0Vertices";
+    String beamConV0VerticesColName = "BeamspotConstrainedV0Vertices";
+    String targetConV0VerticesColName = "TargetConstrainedV0Vertices";
+//    String unconstrainedV0CandidatesColName = null;
+//    String beamConV0CandidatesColName = null;
+//    String targetConV0CandidatesColName = null;
+//    String vertexCandidatesColName = null;
+//    String vertexBeamConsCandidatesName = null;
+//	String unconstrainedV0VerticesColName = null;
+//	String beamConV0VerticesColName = null;
+//	String targetConV0VerticesColName = null;
+
     // The beamsize array is in the tracking frame
     /* TODO  mg-May 14, 2014:  the the beam size from the conditions db...also beam position!  */
     double[] beamsize = {0.001, 0.2, 0.02};
@@ -94,9 +100,43 @@
         this.ecalClustersCollectionName = ecalClustersCollectionName;
     }
 
-    public void setTrackCollectoinName(String tracksCollectionName) {
+    public void setTracksCollectionName(String tracksCollectionName) {
         this.tracksCollectionName = tracksCollectionName;
     }
+    
+    public void setFinalStateParticlesColName(String finalStateParticlesColName) {
+        this.finalStateParticlesColName = finalStateParticlesColName;
+    }
+    
+     
+    public void setUnconstrainedV0CandidatesColName(String unconstrainedV0CandidatesColName) {
+        this.unconstrainedV0CandidatesColName = unconstrainedV0CandidatesColName;
+    }
+    
+    
+    public void setBeamConV0CandidatesColName(String beamConV0CandidatesColName) {
+        this.beamConV0CandidatesColName = beamConV0CandidatesColName;
+    }
+    
+     
+    public void setTargetConV0CandidatesColName(String targetConV0CandidatesColName) {
+        this.targetConV0CandidatesColName = targetConV0CandidatesColName;
+    }
+    
+    
+    public void setUnconstrainedV0VerticesColName(String unconstrainedV0VerticesColName) {
+        this.unconstrainedV0VerticesColName = unconstrainedV0VerticesColName;
+    }
+    
+    
+    public void setBeamConV0VerticesColName(String beamConV0VerticesColName) {
+        this.beamConV0VerticesColName = beamConV0VerticesColName;
+    }
+    
+     
+    public void setTargetConV0VerticesColName(String targetConV0VerticesColName) {
+        this.targetConV0VerticesColName = targetConV0VerticesColName;
+    }
 
     @Override
     protected void detectorChanged(Detector detector) {
@@ -112,8 +152,8 @@
 
         // All events should have a collection of Ecal clusters.  If the event 
         // doesn't have one, skip the event.
-        if (!event.hasCollection(Cluster.class, ecalClustersCollectionName)) 
-        	return;
+        if (!event.hasCollection(Cluster.class, ecalClustersCollectionName))
+            return;
 
         // Get the collection of Ecal clusters from the event. A triggered 
         // event should have Ecal clusters.  If it doesn't, skip the event.
@@ -123,7 +163,7 @@
 
         // Get the collection of tracks from the event
         List<Track> tracks = event.get(Track.class, tracksCollectionName);
-        this.printDebug("Number of Tracks: " + tracks.size());
+        this.printDebug("Number of Tracks in "+tracksCollectionName+" : " + tracks.size());
 
         finalStateParticles = new ArrayList<ReconstructedParticle>();
         electrons = new ArrayList<ReconstructedParticle>();
@@ -145,40 +185,41 @@
         // Loop through the list of final state particles and separate the
         // charged particles to either electrons or positrons.  These lists
         // will be used for vertexing purposes.
-        for (ReconstructedParticle finalStateParticle : finalStateParticles) {
-            if (finalStateParticle.getCharge() > 0) positrons.add(finalStateParticle);
-            else if (finalStateParticle.getCharge() < 0) electrons.add(finalStateParticle);
-        }
+        for (ReconstructedParticle finalStateParticle : finalStateParticles)
+            if (finalStateParticle.getCharge() > 0)
+                positrons.add(finalStateParticle);
+            else if (finalStateParticle.getCharge() < 0)
+                electrons.add(finalStateParticle);
         this.printDebug("Number of Electrons: " + electrons.size());
         this.printDebug("Number of Positrons: " + positrons.size());
-        
+
         // Vertex electron and positron candidates 
         findVertices(electrons, positrons);
 
         // If the list exist, put the vertexed candidates and vertices into the event
-        if (unconstrainedV0CandidatesColName != null){
+        if (unconstrainedV0CandidatesColName != null) {
             this.printDebug("Total number of unconstrained V0 candidates: " + unconstrainedV0Candidates.size());
             event.put(unconstrainedV0CandidatesColName, unconstrainedV0Candidates, ReconstructedParticle.class, 0);
         }
-        if (beamConV0CandidatesColName != null){
+        if (beamConV0CandidatesColName != null) {
             this.printDebug("Total number of beam constrained V0 candidates: " + unconstrainedV0Candidates.size());
             event.put(beamConV0CandidatesColName, beamConV0Candidates, ReconstructedParticle.class, 0);
         }
-        if (targetConV0CandidatesColName != null){
+        if (targetConV0CandidatesColName != null) {
             this.printDebug("Total number of target constrained V0 candidates: " + unconstrainedV0Candidates.size());
             event.put(targetConV0CandidatesColName, targetConV0Candidates, ReconstructedParticle.class, 0);
         }
-        if(unconstrainedV0VerticesColName != null){
-        	this.printDebug("Total number of unconstrained V0 vertices: " + unconstrainedV0Vertices.size());
-        	event.put(unconstrainedV0VerticesColName, unconstrainedV0Vertices, Vertex.class, 0);
+        if (unconstrainedV0VerticesColName != null) {
+            this.printDebug("Total number of unconstrained V0 vertices: " + unconstrainedV0Vertices.size());
+            event.put(unconstrainedV0VerticesColName, unconstrainedV0Vertices, Vertex.class, 0);
         }
-        if(beamConV0VerticesColName != null){
-        	this.printDebug("Total number of beam constrained V0 vertices: " + beamConV0Vertices.size());
-        	event.put(beamConV0VerticesColName, beamConV0Vertices, Vertex.class, 0);
+        if (beamConV0VerticesColName != null) {
+            this.printDebug("Total number of beam constrained V0 vertices: " + beamConV0Vertices.size());
+            event.put(beamConV0VerticesColName, beamConV0Vertices, Vertex.class, 0);
         }
-        if(targetConV0VerticesColName != null){
-        	this.printDebug("Total number of target constrained V0 vertices: " + beamConV0Vertices.size());
-        	event.put(targetConV0VerticesColName, targetConV0Vertices, Vertex.class, 0);
+        if (targetConV0VerticesColName != null) {
+            this.printDebug("Total number of target constrained V0 vertices: " + beamConV0Vertices.size());
+            event.put(targetConV0VerticesColName, targetConV0Vertices, Vertex.class, 0);
         }
     }
 
@@ -198,19 +239,17 @@
 
         // Instantiate the list of unmatched  clusters.  Remove if we find track match
         List<Cluster> unmatchedClusters = new ArrayList<Cluster>(clusters);
-       
 
         for (Track track : tracks) {
-            
+
             ReconstructedParticle particle = new BaseReconstructedParticle();
             HepLorentzVector fourVector = new BasicHepLorentzVector(0, 0, 0, 0);
-            
+
             //
             // Add all track information to the ReconstructedParticle
             //
-            
             particle.addTrack(track);
-            
+
             // Set the momentum of the ReconstructedParticle
             Hep3Vector momentum = new BasicHep3Vector(track.getTrackStates().get(0).getMomentum());
             momentum = CoordinateTransformations.transformVectorToDetector(momentum);
@@ -230,7 +269,7 @@
 
                 // Get the position of the Ecal cluster
                 Hep3Vector clusterPosition = new BasicHep3Vector(cluster.getPosition());
-                
+
                 // Extrapolate the track to the Ecal cluster position
                 Hep3Vector trackPosAtEcal = TrackUtils.extrapolateTrack(track, clusterPosition.z());
                 this.printDebug("Ecal cluster position: " + clusterPosition.toString());
@@ -257,17 +296,17 @@
                     this.printDebug("Track and Ecal cluster are in opposite volumes. Track Y @ ECAL = " + trackPosAtEcal.z());
                     continue;
                 }
-                
+
+                // TODO: Checking whether r < rMax should be occurring within isMatch.  isMatch
                 // 		 is basically repeating a lot of the same code as above.
-                if (r < rMax && isMatch(cluster,track)) {
-                	rMax = r;
-                	matchedCluster = cluster;
+                if (r < rMax && isMatch(cluster, track)) {
+                    rMax = r;
+                    matchedCluster = cluster;
                 }
             }
-            
+
             if (matchedCluster != null) {
-            	particle.addCluster(matchedCluster);
+                particle.addCluster(matchedCluster);
                 ((BasicHepLorentzVector) fourVector).setT(matchedCluster.getEnergy());
                 unmatchedClusters.remove(matchedCluster);
             }
@@ -303,14 +342,14 @@
     }
 
     /**
-     * 
+     *
      */
     boolean isMatch(Cluster cluster, Track track) {
-     
-    	// Get the position of the Ecal cluster
-    	Hep3Vector clusterPosition = new BasicHep3Vector(cluster.getPosition());
-        
-    	// Extrapolate the track to the Ecal cluster position
+
+        // Get the position of the Ecal cluster
+        Hep3Vector clusterPosition = new BasicHep3Vector(cluster.getPosition());
+
+        // Extrapolate the track to the Ecal cluster position
         Hep3Vector trackPosAtEcal = TrackUtils.extrapolateTrack(track, clusterPosition.z());
 
         double dxCut = 20.0;
@@ -318,10 +357,10 @@
 
         if (Math.abs(trackPosAtEcal.x() - clusterPosition.x()) > dxCut)
             return false;
-        
+
         if (Math.abs(trackPosAtEcal.y() - clusterPosition.y()) > dyCut)
             return false;
-        
+
         return true;
     }
 }

java/branches/hps_java_trunk_HPSJAVA-255/record-util
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/record-util/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/record-util/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -2,7 +2,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-record-util</artifactId>
     <name>record-util</name>
-    <description>Record processing utilities</description>
+    <description>record processing utilities for EVIO, LCIO and ET events</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>

java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record
AbstractRecordQueue.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -14,7 +14,6 @@
  * {@link #next()} method to get the next record, which might not be immediately
  * available.
  */
-// TODO: Add max elements argument to limit pile up of unconsumed events.
 public abstract class AbstractRecordQueue<RecordType> extends AbstractRecordSource {
 
     // The queue, which is a linked list with blocking behavior. 
@@ -26,6 +25,7 @@
     // The amount of time to wait for an LCIO event from the queue before dying.
     long timeOutMillis = -1;
     
+    
     /**
      * Constructor that takes the timeout time in seconds.
      * @param timeoutSeconds the timeout time in seconds

java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite
CompositeLoop.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/CompositeLoop.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/CompositeLoop.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -33,16 +33,12 @@
     
     boolean paused = false;
     boolean stopOnErrors = true;
-    boolean done = false;
     
     CompositeLoopConfiguration config = null;
-            
-    // Look in javadoc API and DefaultRecordLoop for what this does.
-    //this._stopOnEOF
-                
+                            
     /**
      * No argument constructor.  
-     * The {@link #configure(CompositeLoopConfiguration)} method must be
+     * The {@link #setCompositeLoopConfiguration(CompositeLoopConfiguration)} method must be
      * called on the loop manually.
      */
     public CompositeLoop() {
@@ -55,7 +51,7 @@
      */
     public CompositeLoop(CompositeLoopConfiguration config) {
         setRecordSource(recordSource);
-        configure(config);
+        setCompositeLoopConfiguration(config);
     }
     
     /**
@@ -108,7 +104,6 @@
         
         // Stop the event processing.
         this.execute(Command.STOP);
-        done = true;
     }
 
     /**
@@ -129,7 +124,6 @@
         
         // Stop the event processing.
         this.execute(Command.STOP);
-        done = true;
     }        
     
     /**
@@ -175,17 +169,8 @@
             return false;
         }
     }
-        
+            
     /**
-     * True if the loop is done processing.  This is 
-     * set to <code>true</code> when fatal errors occur.
-     * @return
-     */
-    public boolean isDone() {
-        return done;
-    }
-    
-    /**
      * Get the last error that occurred.
      * @return The last error that occurred.
      */
@@ -230,12 +215,20 @@
         }
         return getSupplied();
     }
-        
+    
+    public void setConfiguration(Object object) {
+        if (object instanceof CompositeLoopConfiguration) {
+            setCompositeLoopConfiguration((CompositeLoopConfiguration)object);
+        } else {
+            throw new IllegalArgumentException("Wrong type of object to configure CompositeLoop: " + object.getClass().getCanonicalName());
+        }
+    }
+    
     /**
      * Configure the loop using a {@link CompositeLoopConfiguration} object.
      * @param config The CompositeLoopConfiguration object containing the loop configuration parameter values.
      */
-    public final void configure(CompositeLoopConfiguration config) {
+    void setCompositeLoopConfiguration(CompositeLoopConfiguration config) {
         
         if (this.config != null)
             throw new RuntimeException("CompositeLoop has already been configured.");

java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite
CompositeLoopAdapter.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/CompositeLoopAdapter.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/CompositeLoopAdapter.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -32,6 +32,7 @@
      * @param loopEvent 
      */
     public void finish(LoopEvent loopEvent) {
+        System.out.println(this.getClass().getCanonicalName() + ".finish");
         // Call end job hook on all processors.
         for (CompositeRecordProcessor processor : processors) {
             processor.endJob();

java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite
CompositeLoopConfiguration.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/CompositeLoopConfiguration.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/CompositeLoopConfiguration.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -24,6 +24,7 @@
  * may end up being ignored (e.g. setting a file path
  * when actually using an ET server, etc.).
  */
+// TODO: Add lcsim steering setting that uses JobControlManager to create Driver list.
 public class CompositeLoopConfiguration {
         
     boolean stopOnErrors = true;

java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite
EventProcessingThread.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/EventProcessingThread.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/record-util/src/main/java/org/hps/record/composite/EventProcessingThread.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,7 @@
 package org.hps.record.composite;
 
+import org.freehep.record.loop.RecordLoop;
+
 /**
  * Class for running the {@link CompositeLoop} on a separate thread.
  */
@@ -22,33 +24,29 @@
     @Override
     public void run() {                
                 
-        // Keep looping until the event processing is flagged as done.
-        while (true) {
-            // Is the processing unpaused?            
-            // TODO: See if can check for IDLE state. (???)
-            if (!loop.isPaused()) {
+        // Flag that is turned on when looping starts.
+        boolean started = false;
+        
+        // Keep looping until the event processing is done.
+        while (true) {                        
+            
+            // If the loop was started and now is in the IDLE state, it means
+            // that STOP was executed, so break from the processing while loop.
+            if (started && loop.getState().equals(RecordLoop.State.IDLE)) {                
+                // Stop record processing.
+                break;
+            }
+                        
+            // Is the processing unpaused?
+            if (!loop.isPaused()) {                                                
                 
+                // Set a flag to indicate that looping has started.
+                started = true;
+                
                 // Loop until done, error occurs, or pause is requested.
                 // FIXME: The maximum number of records should be used here.
                 loop.loop(-1);
-                
-                // If paused, current record will still be completed!
-                
-                // Is loop done?
-                // TODO: See if can check for IDLE state instead.
-                if (loop.isDone()) {
-                    // Stop record processing.
-                    break;
-                }
-            }
-            
-            // Sleep for a little while between loop iterations (e.g. while paused).
-            try {
-                Thread.sleep(100);
-            } catch (InterruptedException e) {
-                e.printStackTrace();
-            }
+            }            
         }
-        
     }
 }
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-255/steering-files
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,22 +1,18 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-steering-files</artifactId>
     <name>steering-files</name>
-    <description>XML steering files</description>
-    
+    <description>org.lcsim XML steering file resources</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>
-    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/steering-files/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/steering-files/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/steering-files/</developerConnection>
     </scm>
-    
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/monitoring
ExampleEcalMonitoringPlots.lcsim removed after 1243
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/monitoring/ExampleEcalMonitoringPlots.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/monitoring/ExampleEcalMonitoringPlots.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,37 +0,0 @@
-<lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" 
-       xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
-    <execute>
-        <driver name="EventMarkerDriver"/>
-        <driver name="CalibrationDriver"/>
-        <driver name="EcalRawConverter"/> 
-        <driver name="EcalClusterer"/> 
-        <driver name="EcalMonitoringPlots"/>
-        <driver name="EcalHitPlots"/>             
-        <driver name="EcalClusterPlots"/>
-    </execute>      
-    <drivers>
-       <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
-            <eventInterval>1</eventInterval>
-        </driver>
-        <driver name="CalibrationDriver" type="org.hps.conditions.deprecated.CalibrationDriver"/>           
-        <driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
-            <applyBadCrystalMap>false</applyBadCrystalMap>
-        </driver>        
-        <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterIC">            
-            <ecalName>Ecal</ecalName>
-            <ecalCollectionName>EcalCalHits</ecalCollectionName>
-        </driver>        
-        <driver name="EcalMonitoringPlots" type="org.hps.monitoring.ecal.plots.EcalMonitoringPlots">
-            <inputCollection>EcalCalHits</inputCollection>
-            <eventRefreshRate>100</eventRefreshRate>
-        </driver>        
-        <driver name="EcalHitPlots" type="org.hps.monitoring.ecal.plots.EcalHitPlots">
-            <maxE>2.0</maxE>
-            <logScale>true</logScale>
-        </driver>        
-        <driver name="EcalClusterPlots" type="org.hps.monitoring.ecal.plots.EcalClusterPlots">
-            <maxE>2.0</maxE>
-            <logScale>false</logScale>
-        </driver>
-    </drivers>
-</lcsim>

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/monitoring
ExampleMonitoringPlots.lcsim 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/monitoring/ExampleMonitoringPlots.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/monitoring/ExampleMonitoringPlots.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,9 +1,15 @@
 <lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" 
        xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">       
     <execute>
-        <driver name="SimplePlotDriver"/>
+        <driver name="EventMarkerDriver"/>
+        <driver name="TestRunReconDriver"/>
+        <driver name="ExamplePlotDriver"/>
     </execute>   
     <drivers>
-        <driver name="SimplePlotDriver" type="org.hps.monitoring.drivers.example.SimplePlotDriver" />
+        <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
+            <eventInterval>1</eventInterval>
+        </driver>
+        <driver name="TestRunReconDriver" type="org.hps.users.jeremym.TestRunReconDriver"/>
+        <driver name="ExamplePlotDriver" type="org.hps.monitoring.drivers.example.ExamplePlotDriver" />
     </drivers>
-</lcsim>
+</lcsim>
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon
HPS2014OfflineNoPileupRecon.lcsim 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineNoPileupRecon.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineNoPileupRecon.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,7 @@
 <!-- 
   Offline reconstruction for 2014 (electron run) data.
+    Updated on 16Oct to include new clustering with corrections. -HS <[log in to unmask]>
+  
   @author Sho Uemura <[log in to unmask]>
   @version $Id: HPS2014OfflineRecon.lcsim,v 1.7 2013/10/30 16:23:32 phansson Exp $
 -->
@@ -58,12 +60,12 @@
             <useTimestamps>true</useTimestamps>
             <useTruthTime>false</useTruthTime>
         </driver>
-        <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterICBasic">
+        <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterIC">
             <ecalName>Ecal</ecalName>
             <ecalCollectionName>EcalCalHits</ecalCollectionName>
             <timeCut>true</timeCut>
         </driver>
-        <driver name="ReconParticle" type="org.hps.recon.particle.HpsReconParticleDriver">          
+        <driver name="ReconParticle" type="org.hps.recon.particle.HpsReconParticleDriverIC">          
         </driver>
         <driver name="TrackDataDriver" type="org.hps.recon.tracking.TrackDataDriver" />
         <driver name="LCIOWriter" type="org.lcsim.util.loop.LCIODriver">

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon
HPS2014OfflineRecon.lcsim 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineRecon.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineRecon.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,6 @@
 <!-- 
   Offline reconstruction for 2014 (electron run) data.
+  Updated on 16Oct to include new clustering with corrections. -HS <[log in to unmask]>
   @author Sho Uemura <[log in to unmask]>
   @version $Id: HPS2014OfflineRecon.lcsim,v 1.7 2013/10/30 16:23:32 phansson Exp $
 -->
@@ -62,12 +63,12 @@
             <useTimestamps>true</useTimestamps>
             <useTruthTime>false</useTruthTime>
         </driver>
-        <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterICBasic">
+        <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterIC">
             <ecalName>Ecal</ecalName>
             <ecalCollectionName>EcalCalHits</ecalCollectionName>
             <timeCut>true</timeCut>
         </driver>
-        <driver name="ReconParticle" type="org.hps.recon.particle.HpsReconParticleDriver">          
+        <driver name="ReconParticle" type="org.hps.recon.particle.HpsReconParticleDriverIC">          
         </driver>
         <driver name="TrackDataDriver" type="org.hps.recon.tracking.TrackDataDriver" />
         <driver name="LCIOWriter" type="org.lcsim.util.loop.LCIODriver">

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon
HPS2014OfflineTruthRecon.lcsim 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineTruthRecon.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineTruthRecon.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,7 @@
 <!-- 
   Offline reconstruction for 2014 (electron run) data.
+    Updated on 16Oct to include new clustering with corrections. -HS <[log in to unmask]>
+  
   @author Sho Uemura <[log in to unmask]>
   @version $Id: HPS2014OfflineRecon.lcsim,v 1.7 2013/10/30 16:23:32 phansson Exp $
 -->
@@ -68,12 +70,12 @@
             <useTimestamps>false</useTimestamps>
             <useTruthTime>true</useTruthTime>
         </driver>
-        <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterICBasic">
+        <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterIC">
             <ecalName>Ecal</ecalName>
             <ecalCollectionName>EcalCalHits</ecalCollectionName>
             <timeCut>true</timeCut>
         </driver>
-        <driver name="ReconParticle" type="org.hps.recon.particle.HpsReconParticleDriver">          
+        <driver name="ReconParticle" type="org.hps.recon.particle.HpsReconParticleDriverIC">          
         </driver>
         <driver name="TrackDataDriver" type="org.hps.recon.tracking.TrackDataDriver" />
         <driver name="LCIOWriter" type="org.lcsim.util.loop.LCIODriver">

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/mgraham
DataQualityMonitor.lcsim 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/mgraham/DataQualityMonitor.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/mgraham/DataQualityMonitor.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -6,26 +6,30 @@
        xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
     <execute>
         <driver name="EventMarkerDriver"/>
-        <driver name="DQMDatabaseDriver"/>  
+<!--        <driver name="DQMDatabaseDriver"/>   -->
         <driver name="CalibrationDriver"/> 
          <driver name="RawTrackerHitSensorSetup"/>
-        <driver name="BadChannelFilter" /> 
+<!--        <driver name="BadChannelFilter" />  -->
          <driver name="RawTrackerHitFitterDriver" />
          <driver name="TrackerHitDriver"/>
          <driver name="HelicalTrackHitDriver"/>
          <driver name="TrackerReconDriver"/>
-          <driver name="TrackDataDriver"/>
+          <driver name="TrackDataDriver"/>         
         <driver name="EcalRawConverter" />
-         <driver name="EcalClusterer" />
-          <driver name="ReconParticle" />      
-        <driver name="SVTMonitoring"/>  
+         <driver name="EcalClusterer" /> 
+          <driver name="ReconParticle" />     
+<!--        <driver name="SVTMonitoring"/>  
        <driver name="SVTHitMCEfficiency"/>  
         <driver name="TrackingMonitoring"/> 
         <driver name="TrackingResiduals"/> 
         <driver name="TrackMCEfficiency"/>
         <driver name="FinalStateMonitoring"/>  
-        <driver name="V0Monitoring"/>         
-        <driver name="AidaSaveDriver"/>
+        <driver name="V0Monitoring"/>         -->
+        <driver name="AddBeamSpotToTrack"/>
+       <driver name="BSTrackReconParticle" />     
+        <driver name="BeamSpotTrackAnalysis"/> 
+        <driver name="AidaSaveDriver"/> 
+        <driver name="LCIOWriter"/> 
         <driver name="CleanupDriver"/>
     </execute>    
     <drivers>    
@@ -68,12 +72,26 @@
             <ecalName>Ecal</ecalName>
             <ecalCollectionName>EcalCalHits</ecalCollectionName>
         </driver>
+        <driver name="AddBeamSpotToTrack" type="org.hps.users.mgraham.AddBeamSpotToTrack">            
+        </driver>
+     <driver name="BeamSpotTrackAnalysis" type="org.hps.users.mgraham.BeamSpotTrackAnalysis">            
+        </driver>
         <driver name="ReconParticle" type="org.hps.recon.particle.HpsReconParticleDriver">  
-            <debug>false</debug>
+            <debug>true</debug>
         </driver>
-        <driver name="LCIOWriter" type="org.lcsim.util.loop.LCIODriver">
-            <outputFilePath>${outputFile}.slcio</outputFilePath>
+
+    <driver name="BSTrackReconParticle" type="org.hps.recon.particle.HpsReconParticleDriver">  
+            <debug>true</debug>
+            <tracksCollectionName>BeamSpotTracks</tracksCollectionName>
+            <finalStateParticlesColName>BSFinalStateParticles</finalStateParticlesColName>
+            <unconstrainedV0CandidatesColName>BSUnconstrainedV0Candidates</unconstrainedV0CandidatesColName>
+            <beamConV0CandidatesColName>BSBeamspotConstrainedV0Candidates</beamConV0CandidatesColName>
+            <targetConV0CandidatesColName>BSTargetConstrainedV0Candidates</targetConV0CandidatesColName>
+            <unconstrainedV0VerticesColName>BSUnconstrainedV0Vertices</unconstrainedV0VerticesColName>
+            <beamConV0VerticesColName>BSBeamspotConstrainedV0Vertices</beamConV0VerticesColName>
+            <targetConV0VerticesColName>BSTargetConstrainedV0Vertices</targetConV0VerticesColName>
         </driver>
+       
         <driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
                  <outputFileName>./conditions_test_plots.root</outputFileName>
         </driver>
@@ -105,6 +123,9 @@
             <overwriteDB>false</overwriteDB>
        </driver>
         <driver name="CleanupDriver" type="org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver"/>
-
+   <driver name="LCIOWriter"
+                type="org.lcsim.util.loop.LCIODriver">
+            <outputFilePath>blah.slcio</outputFilePath>
+        </driver>
     </drivers>
 </lcsim>

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/mgraham
DataQualityMonitorOnRecon.lcsim 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/mgraham/DataQualityMonitorOnRecon.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/mgraham/DataQualityMonitorOnRecon.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -80,28 +80,34 @@
         <driver name="SVTMonitoring" type="org.hps.analysis.dataquality.SvtMonitoring">
              <runNumber>${runNumber}</runNumber>
             <overwriteDB>false</overwriteDB>
+            <printDQMStrings>true</printDQMStrings>
         </driver>
        <driver name="TrackingMonitoring" type="org.hps.analysis.dataquality.TrackingMonitoring">
              <runNumber>${runNumber}</runNumber>
             <overwriteDB>false</overwriteDB>
+             <printDQMStrings>true</printDQMStrings>
        </driver>
   <driver name="TrackingResiduals" type="org.hps.analysis.dataquality.TrackingResiduals">
              <runNumber>${runNumber}</runNumber>
             <overwriteDB>false</overwriteDB>
+            <printDQMStrings>true</printDQMStrings>
        </driver>
        <driver name="FinalStateMonitoring" type="org.hps.analysis.dataquality.FinalStateMonitoring">
              <runNumber>${runNumber}</runNumber>
             <overwriteDB>false</overwriteDB>
+             <printDQMStrings>true</printDQMStrings>
        </driver>
       <driver name="TrackMCEfficiency" type="org.hps.analysis.dataquality.TrackMCEfficiency">
             <overwriteDB>false</overwriteDB>
+             <printDQMStrings>true</printDQMStrings>
        </driver> 
         <driver name="SVTHitMCEfficiency" type="org.hps.analysis.dataquality.SVTHitMCEfficiency">
             <overwriteDB>false</overwriteDB>
-            <printDQMStrings>false</printDQMStrings>
+            <printDQMStrings>true</printDQMStrings>
        </driver> 
        <driver name="V0Monitoring" type="org.hps.analysis.dataquality.V0Monitoring">
              <runNumber>${runNumber}</runNumber>
+              <printDQMStrings>true</printDQMStrings>
             <overwriteDB>false</overwriteDB>
        </driver>
         <driver name="CleanupDriver" type="org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver"/>

java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/phansson
HPSTrackingDefaults.lcsim 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/phansson/HPSTrackingDefaults.lcsim	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/steering-files/src/main/resources/org/hps/steering/users/phansson/HPSTrackingDefaults.lcsim	2014-10-19 06:41:14 UTC (rev 1244)
@@ -13,14 +13,23 @@
 
     <execute>
         <driver name="EventMarkerDriver"/>
+         <driver name="CalibrationDriver"/>   
+        <driver name="BadChannelFilter" />
+        <driver name="SimpleSVTReadout"/>
+        <driver name="RawTrackerHitFitterDriver"/>
+        <driver name="TrackerHitDriver"/>
+    <!--
         <driver name="HPSSVTSensorSetup"/>   
         <driver name="TrackerDigiDriver"/> 
+    -->
         <driver name="HelicalTrackHitDriver"/>
         <driver name="TrackerReconDriver"/>
+        <driver name="SVTEventInfo"/>
+        <driver name="SVTHitReconstructionPlots"/>
+        <driver name="TrackingReconstructionPlots"/>
         <driver name="LCIOWriter"/>
 
 
-
     </execute>    
  
     <drivers>
@@ -28,8 +37,19 @@
                 type="org.lcsim.job.EventMarkerDriver">
             <eventInterval>100</eventInterval>
         </driver>
+        <driver name="CalibrationDriver" type="org.hps.conditions.deprecated.CalibrationDriver">
+            <runNumber>${runNumber}</runNumber>
+        </driver>
+         <driver name="BadChannelFilter" type="org.hps.recon.tracking.SVTBadChannelFilterDriver" />     
  		<driver name="HPSSVTSensorSetup" type="org.hps.conditions.deprecated.HPSSVTSensorSetup"/>
-        
+        <driver name="SimpleSVTReadout" type="org.hps.readout.svt.SimpleSvtReadout">
+            <noPileup>true</noPileup>
+        </driver>
+        <driver name="RawTrackerHitFitterDriver" type="org.hps.recon.tracking.RawTrackerHitFitterDriver">
+            <fitAlgorithm>Analytic</fitAlgorithm>
+            <correctT0Shift>true</correctT0Shift>
+        </driver>
+        <driver name="TrackerHitDriver" type="org.hps.recon.tracking.DataTrackerHitDriver" />     
         <driver name="TrackerDigiDriver"
                 type="org.hps.recon.tracking.SimpleTrackerDigiDriver">
             <debug>true</debug>
@@ -37,17 +57,28 @@
 
         <driver name="HelicalTrackHitDriver"
                 type="org.hps.recon.tracking.HelicalTrackHitDriver">
-            <debug>true</debug>
+            <debug>false</debug>
             <maxSeperation>20.0</maxSeperation>
             <tolerance>1.0</tolerance>
         </driver>
         
-        <driver name="TrackerReconDriver"
-                type="org.hps.recon.tracking.TrackerReconDriver">
-            <debug>true</debug>
-            <strategyResource>/org/hps/recon/tracking/strategies/HPS-Full.xml</strategyResource>
+         
+        <driver name="TrackerReconDriver" type="org.hps.recon.tracking.TrackerReconDriver">
+            <debug>false</debug>
+            <strategyResource>/org/hps/recon/tracking/strategies/HPS-Test-All.xml</strategyResource>
+        </driver>   
+
+        <driver name="TrackingReconstructionPlots" type="org.hps.monitoring.drivers.svt.TrackingReconstructionPlots">
+            <outputPlots>TrackingReconstructionPlots.aida</outputPlots>
         </driver>
 
+        <driver name="SVTHitReconstructionPlots" type="org.hps.monitoring.drivers.svt.SVTHitReconstructionPlots">
+            <outputPlots>SVTHitReconstructionPlots.aida</outputPlots>
+        </driver>
+        
+        <driver name="SVTEventInfo" type="org.hps.monitoring.drivers.svt.SVTEventInfo">
+        </driver>
+
         <driver name="LCIOWriter"
                 type="org.lcsim.util.loop.LCIODriver">
             <outputFilePath>${outputFile}</outputFilePath>

java/branches/hps_java_trunk_HPSJAVA-255/tracking
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,24 +1,20 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-tracking</artifactId>
     <name>tracking</name>
-    <description>HPS tracking reconstruction module</description>
-    
+    <description>tracking reconstruction algorithms</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>
-    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/tracking/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/tracking/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/tracking/</developerConnection>
     </scm>
-    
     <build>
         <plugins>
             <plugin>
@@ -27,6 +23,7 @@
                 <configuration>
                     <excludes>
                         <exclude>org/hps/recon/tracking/TruthResidualTest.java</exclude>
+                        <exclude>org/hps/recon/tracking/TrackRecoFromScratchTest.java</exclude>
                         <!-- Test input LCIO file is missing for next two. -->
                         <exclude>org/hps/recon/tracking/TestRunTrackReconTest.java</exclude>
                         <exclude>org/hps/recon/tracking/HelicalTrackHitDriverTest.java</exclude>
@@ -35,14 +32,8 @@
             </plugin>
         </plugins>
     </build>
-
     <dependencies>
         <dependency>
-            <groupId>org.lcsim</groupId>
-            <artifactId>lcsim-distribution</artifactId>
-            <version>${lcsimVersion}</version>
-        </dependency>
-        <dependency>
             <groupId>org.hps</groupId>
             <artifactId>hps-conditions</artifactId>
         </dependency>
@@ -61,5 +52,4 @@
             <version>1.0.2-SNAPSHOT</version>
         </dependency>
     </dependencies>
-    
 </project>

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking
MaterialSupervisor.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/MaterialSupervisor.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/MaterialSupervisor.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -25,56 +25,71 @@
 
 /**
  * Material manager using the detector geometry.
- * 
- * Uses a private class to set up detector volumes. This can probably make use of the
- * DetectorGeometry classes from lcsim instead for the model. Something to consider in the future.
- * 
+ *
+ * Uses a private class to set up detector volumes. This can probably make use
+ * of the DetectorGeometry classes from lcsim instead for the model. Something
+ * to consider in the future.
+ *
  * @author Per Hansson <[log in to unmask]>
  */
-public class MaterialSupervisor extends MaterialManager {
+public class MaterialSupervisor extends MaterialManager
+{
 
     private List<ScatteringDetectorVolume> _detectorVolumes = new ArrayList<ScatteringDetectorVolume>();
 
-    public MaterialSupervisor() {
+    public MaterialSupervisor()
+    {
         super();
         this._includeMS = true;
     }
 
-    public MaterialSupervisor(boolean includeMS) {
+    public MaterialSupervisor(boolean includeMS)
+    {
         super(includeMS);
     }
 
     @Override
-    public void setDebug(boolean debug) {
+    public void setDebug(boolean debug)
+    {
         super.setDebug(debug);
     }
 
-    public List<ScatteringDetectorVolume> getMaterialVolumes() {
+    public List<ScatteringDetectorVolume> getMaterialVolumes()
+    {
         return _detectorVolumes;
     }
 
     @Override
-    public void buildModel(Detector det) {
+    public void buildModel(Detector det)
+    {
         // super.buildModel(det);
-        // if(DEBUG)
-        System.out.printf("%s: ###########################################################\n", this.getClass().getSimpleName());
-        System.out.printf("%s: Build detector model\n", this.getClass().getSimpleName());
+        if (DEBUG) {
+            System.out.printf("%s: ###########################################################\n", this.getClass().getSimpleName());
+            System.out.printf("%s: Build detector model\n", this.getClass().getSimpleName());
+        }
         List<SiSensor> sensors = det.getSubdetector("Tracker").getDetectorElement().findDescendants(SiSensor.class);
         // List<SiTrackerModule> modules =
         // det.getDetectorElement().findDescendants(SiTrackerModule.class);
-        System.out.printf("%s: %d sensors\n", this.getClass().getSimpleName(), sensors.size());
-        System.out.printf("%s: %5s %32s %22s %15s %10s %10s\n", this.getClass().getSimpleName(), "ID", "Pos (mm)", "size(mm)", "t(mm)", "t(%R.L)", "type");
+        if (DEBUG) {
+            System.out.printf("%s: %d sensors\n", this.getClass().getSimpleName(), sensors.size());
+            System.out.printf("%s: %5s %32s %22s %15s %10s %10s\n", this.getClass().getSimpleName(), "ID", "Pos (mm)", "size(mm)", "t(mm)", "t(%R.L)", "type");
+        }
         for (SiSensor module : sensors) {
 
             SiStripPlane plane = new SiStripPlane(module);
 
-            System.out.printf("%s: %5d %32s %15.2fx%.2f %10.2f %10.3f %10s\n", this.getClass().getSimpleName(), plane.getId(), plane.origin().toString(), plane.getUnmeasuredDimension(), plane.getMeasuredDimension(), plane.getThickness(), plane.getThicknessInRL() * 100, SvtUtils.getInstance().isAxial(module) ? "axial" : "stereo");
+            if (DEBUG) {
+                System.out.printf("%s: %5d %32s %15.2fx%.2f %10.2f %10.3f %10s\n", this.getClass().getSimpleName(), plane.getId(), plane.origin().toString(), plane.getUnmeasuredDimension(), plane.getMeasuredDimension(), plane.getThickness(), plane.getThicknessInRL() * 100, SvtUtils.getInstance().isAxial(module) ? "axial" : "stereo");
+            }
             _detectorVolumes.add(plane);
         }
-        System.out.printf("%s: ###########################################################\n", this.getClass().getSimpleName());
+        if (DEBUG) {
+            System.out.printf("%s: ###########################################################\n", this.getClass().getSimpleName());
+        }
     }
 
-    public interface ScatteringDetectorVolume {
+    public interface ScatteringDetectorVolume
+    {
 
         public String getName();
 
@@ -88,7 +103,8 @@
     }
 
     // public abstract class DetectorPlane extends SiSensor {
-    public interface DetectorPlane extends ScatteringDetectorVolume {
+    public interface DetectorPlane extends ScatteringDetectorVolume
+    {
 
         public double getThickness();
 
@@ -106,12 +122,14 @@
 
     }
 
-    private abstract class SiPlane implements DetectorPlane {
+    private abstract class SiPlane implements DetectorPlane
+    {
 
         abstract void addMaterial();
     }
 
-    public class SiStripPlane extends SiPlane {
+    public class SiStripPlane extends SiPlane
+    {
 
         private Hep3Vector _org = null; // origin
         private Hep3Vector _w = null; // normal to plane
@@ -122,7 +140,8 @@
         private double _length;
         private double _width;
 
-        public SiStripPlane(SiSensor module) {
+        public SiStripPlane(SiSensor module)
+        {
             _sensor = module;
             setOrigin();
             setNormal();
@@ -134,32 +153,39 @@
         }
 
         @Override
-        public IDetectorElement getDetectorElement() {
+        public IDetectorElement getDetectorElement()
+        {
             return getSensor();
         }
 
-        private SiTrackerModule getModule() {
+        private SiTrackerModule getModule()
+        {
             return (SiTrackerModule) getGeometry().getDetectorElement().getParent();
         }
 
-        private IGeometryInfo getGeometry() {
+        private IGeometryInfo getGeometry()
+        {
             return getSensor().getGeometry();
         }
 
-        SiSensor getSensor() {
+        public SiSensor getSensor()
+        {
             return _sensor;
         }
 
-        Polygon3D getPsidePlane() {
+        public Polygon3D getPsidePlane()
+        {
             return getSensor().getBiasSurface(ChargeCarrier.HOLE);
         }
 
-        Polygon3D getNsidePlane() {
+        public Polygon3D getNsidePlane()
+        {
             return getSensor().getBiasSurface(ChargeCarrier.ELECTRON);
         }
 
         @Override
-        public double getMaterialTraversed(Hep3Vector dir) {
+        public double getMaterialTraversed(Hep3Vector dir)
+        {
             // the distance inside the plane (note I don't care about sign of unit vector only
             // projection distance)
             double cth = Math.abs(VecOp.dot(dir, _w));
@@ -168,7 +194,8 @@
         }
 
         @Override
-        public double getMaterialTraversedInRL(Hep3Vector dir) {
+        public double getMaterialTraversedInRL(Hep3Vector dir)
+        {
             // the distance inside the plane (note I don't care about sign of unit vector only
             // projection distance)
             double cth = Math.abs(VecOp.dot(dir, _w));
@@ -177,7 +204,8 @@
         }
 
         @Override
-        protected void addMaterial() {
+        protected void addMaterial()
+        {
 
             IPhysicalVolume parent = getModule().getGeometry().getPhysicalVolume();
             IPhysicalVolumeContainer daughters = parent.getLogicalVolume().getDaughters();
@@ -194,21 +222,25 @@
             }
         }
 
-        public void addMaterial(String type, double density, double radLen, double t) {
+        public void addMaterial(String type, double density, double radLen, double t)
+        {
             _materials.add(type, density, radLen, t);
         }
 
         @Override
-        public double getThickness() {
+        public double getThickness()
+        {
             return _materials.getThickness();
         }
 
         @Override
-        public double getThicknessInRL() {
+        public double getThicknessInRL()
+        {
             return _materials.getThicknessInRL();
         }
 
-        private void setDimensions() {
+        private void setDimensions()
+        {
             // The dimensions are taken from the full module
             IPhysicalVolume physVol_parent = getModule().getGeometry().getPhysicalVolume();
             ILogicalVolume logVol_parent = physVol_parent.getLogicalVolume();
@@ -225,17 +257,20 @@
         }
 
         @Override
-        public Hep3Vector origin() {
+        public Hep3Vector origin()
+        {
 
             return _org;
         }
 
-        public void setOrigin(Hep3Vector org) {
+        public void setOrigin(Hep3Vector org)
+        {
 
             this._org = org;
         }
 
-        private void setOrigin() {
+        private void setOrigin()
+        {
             // Use origin of p-side surface
             Hep3Vector origin = VecOp.mult(CoordinateTransformations.getMatrix(), _sensor.getGeometry().getPosition());
             // transform to p-side
@@ -245,7 +280,8 @@
         }
 
         @Override
-        public Hep3Vector normal() {
+        public Hep3Vector normal()
+        {
             if (_w == null) {
                 _w = this.getPsidePlane().getNormal();
                 System.out.printf("setting normal from pside normal %s\n", _w.toString());
@@ -255,57 +291,69 @@
             return this._w;
         }
 
-        private void setNormal() {
+        private void setNormal()
+        {
             _w = this.getPsidePlane().getNormal();
             _w = VecOp.mult(VecOp.mult(CoordinateTransformations.getMatrix(), getSensor().getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal().getRotation().getRotationMatrix()), _w);
         }
 
-        public void setNormal(Hep3Vector w) {
+        public void setNormal(Hep3Vector w)
+        {
             this._w = w;
         }
 
         @Override
-        public void print() {
+        public void print()
+        {
             System.out.printf("DetectorPlane:  org %s normal vector %s %.2fx%.2fmm  thickness %f R.L. (%fmm)\n", origin().toString(), normal().toString(), getLength(), getWidth(), getThicknessInRL(), getThickness());
         }
 
         @Override
-        public int getId() {
+        public int getId()
+        {
             return _sensor.getSensorID();
         }
 
         @Override
-        public String getName() {
+        public String getName()
+        {
             return _sensor.getName();
         }
 
         @Override
-        public double getLength() {
+        public double getLength()
+        {
             return _length;
         }
 
         @Override
-        public double getWidth() {
+        public double getWidth()
+        {
             return _width;
         }
 
-        double getMeasuredDimension() {
+        public double getMeasuredDimension()
+        {
             return getLength();
         }
 
-        double getUnmeasuredDimension() {
+        public double getUnmeasuredDimension()
+        {
             return getWidth();
         }
 
-        Hep3Vector getUnmeasuredCoordinate() {
+        public Hep3Vector getUnmeasuredCoordinate()
+        {
             return _v;
         }
 
-        Hep3Vector getMeasuredCoordinate() {
+        public Hep3Vector getMeasuredCoordinate()
+        {
             return _u;
         }
 
-        private void setMeasuredCoordinate() {
+        private void setMeasuredCoordinate()
+        {
             // p-side unit vector
             ITransform3D electrodes_to_global = getSensor().getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
             Hep3Vector measuredCoordinate = getSensor().getReadoutElectrodes(ChargeCarrier.HOLE).getMeasuredCoordinate(0);
@@ -313,7 +361,8 @@
             _u = measuredCoordinate;
         }
 
-        private void setUnmeasuredCoordinate() {
+        private void setUnmeasuredCoordinate()
+        {
             // p-side unit vector
             ITransform3D electrodes_to_global = getSensor().getReadoutElectrodes(ChargeCarrier.HOLE).getLocalToGlobal();
             Hep3Vector unmeasuredCoordinate = getSensor().getReadoutElectrodes(ChargeCarrier.HOLE).getUnmeasuredCoordinate(0);
@@ -323,51 +372,61 @@
 
     }
 
-    private static class Material {
+    private static class Material
+    {
 
         private String _name;
         private double _X0;
         private double _density;
         private double _thickness;
 
-        public Material(String _name, double _X0, double _density, double _thickness) {
+        public Material(String _name, double _X0, double _density, double _thickness)
+        {
             this._name = _name;
             this._X0 = _X0;
             this._density = _density;
             this._thickness = _thickness;
         }
 
-        private void add(double t) {
+        private void add(double t)
+        {
             _thickness += t;
         }
 
-        public double getThickness() {
+        public double getThickness()
+        {
             return _thickness;
         }
 
-        public double getDensity() {
+        public double getDensity()
+        {
             return _density;
         }
 
-        public double getX0() {
+        public double getX0()
+        {
             return _X0;
         }
 
     }
 
-    private static class Materials {
+    private static class Materials
+    {
 
         private List<Material> _materials = new ArrayList<Material>();
         private double _tot_X0 = -1;
 
-        public Materials() {
+        public Materials()
+        {
         }
 
-        public int numberOfMaterials() {
+        public int numberOfMaterials()
+        {
             return _materials.size();
         }
 
-        public void add(String mat, double density, double radLen, double t) {
+        public void add(String mat, double density, double radLen, double t)
+        {
             boolean found = false;
             for (Material m : _materials) {
                 if (m._name == mat) {
@@ -383,9 +442,11 @@
 
         }
 
-        public double getThicknessInRL() {
-            if (_materials.isEmpty())
+        public double getThicknessInRL()
+        {
+            if (_materials.isEmpty()) {
                 return 0;
+            }
             if (_tot_X0 < 0) {
                 double sum = 0.;
                 for (Material m : _materials) {
@@ -403,7 +464,8 @@
             return _tot_X0;
         }
 
-        private double getThickness() {
+        private double getThickness()
+        {
             double t_tot = 0.;
             for (Material m : _materials) {
                 t_tot += m.getThickness();

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking
TrackerDigiDriver.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/TrackerDigiDriver.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/TrackerDigiDriver.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -291,7 +291,9 @@
 
         // Put output hits into collection.
         int flag = LCIOUtil.bitSet(0, 31, true); // Turn on 64-bit cell ID.
-        event.put(this.rawTrackerHitOutputCollectionName, rawHits, RawTrackerHit.class, flag, toString());
-        event.put(this.stripHitOutputCollectionName, stripHits1D, SiTrackerHitStrip1D.class, 0, toString());
+        //System.out.println("TrackerDigiDriver putting collection " + this.rawTrackerHitOutputCollectionName + " with readoutName " + readoutCollectionName);
+        event.put(this.rawTrackerHitOutputCollectionName, rawHits, RawTrackerHit.class, flag, readoutCollectionName);
+        //System.out.println("TrackerDigiDriver putting collection " + this.stripHitOutputCollectionName + " with readoutName " + readoutCollectionName);
+        event.put(this.stripHitOutputCollectionName, stripHits1D, SiTrackerHitStrip1D.class, 0, readoutCollectionName);
     }
 }
\ No newline at end of file

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl
HpsGblFitter.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblFitter.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblFitter.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -192,8 +192,8 @@
 
             Matrix mDirT = MatrixOp.transposed(mDir); //new BasicMatrix(MatrixOp.transposed(mDir));
             if (_debug) {
-                System.out.printf(" mDir \n%s\n", this.getClass().getSimpleName(), mDir.toString());
-                System.out.printf(" mDirT \n%s\n", this.getClass().getSimpleName(), mDirT.toString());
+                System.out.printf(" mDir \n%s\n%s\n", this.getClass().getSimpleName(), mDir.toString());
+                System.out.printf(" mDirT \n%s\n%s\n", this.getClass().getSimpleName(), mDirT.toString());
             }
 
             // Track direction 
@@ -213,7 +213,7 @@
             uvDir.setElement(1, 2, cosLambda);
 
             if (_debug) {
-                System.out.printf(" uvDir \n%s\n", this.getClass().getSimpleName(), uvDir.toString());
+                System.out.printf(" uvDir \n%s\n%s\n", this.getClass().getSimpleName(), uvDir.toString());
             }
 
             // projection from  measurement to local (curvilinear uv) directions (duv/dm)
@@ -224,8 +224,8 @@
 
             //proL2m_list[strip->GetId()] = new TMatrixD(proL2m);
             if (_debug) {
-                System.out.printf(" proM2l \n%s\n", this.getClass().getSimpleName(), proM2l.toString());
-                System.out.printf(" proL2m \n%s\n", this.getClass().getSimpleName(), proL2m.toString());
+                System.out.printf(" proM2l \n%s\n%s\n", this.getClass().getSimpleName(), proM2l.toString());
+                System.out.printf(" proL2m \n%s\n%s\n", this.getClass().getSimpleName(), proL2m.toString());
             }
 
             // measurement/residual in the measurement system
@@ -404,6 +404,12 @@
             return -3;
         }
 
+        // print the trajectory
+        if(_debug)
+        {
+            System.out.println(" Gbl Trajectory ");
+            _traj.printPoints(4);
+        }
         // fit trajectory
         _traj.fit(m_chi2, m_ndf, m_lost_weight);
         

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl
HpsGblRefitter.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl/HpsGblRefitter.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -23,13 +23,13 @@
 import org.hps.recon.tracking.gbl.matrix.Vector;
 
 /**
- * A Driver which refits tracks using GBL Modeled on the hps-dst code written by
- * Per Hansson and Omar Moreno Requires the GBL Collections and Relations to be
- * present in the event.
+ * A Driver which refits tracks using GBL. 
+ * Modeled on the hps-dst code written by Per Hansson and Omar Moreno. 
+ * Requires the GBL Collections and Relations to be present in the event.
  *
  * @author Norman A Graf
  *
- * @version $Id:
+ * @version $Id$
  */
 public class HpsGblRefitter extends Driver
 {

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl
MilleBinary.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl/MilleBinary.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/gbl/MilleBinary.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,5 +1,148 @@
 package org.hps.recon.tracking.gbl;
 
-public class MilleBinary {
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.FileChannel;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
+/**
+ * Millepede-II (binary) record.
+ * Containing information for local (track) and global fit.
+ *
+ *         real array              integer array
+ *     0   0.0                     error count (this record)
+ *     1   RMEAS, measured value   0                            -+
+ *     2   local derivative        index of local derivative     |
+ *     3   local derivative        index of local derivative     |
+ *     4    ...                                                  | block
+ *         SIGMA, error (>0)       0                             |
+ *         global derivative       label of global derivative    |
+ *         global derivative       label of global derivative   -+
+ *         RMEAS, measured value   0
+ *         local derivative        index of local derivative
+ *         local derivative        index of local derivative
+ *         ...
+ *         SIGMA, error            0
+ *         global derivative       label of global derivative
+ *         global derivative       label of global derivative
+ *         ...
+ *         global derivative       label of global derivative
+ *
+ * @author Norman A Graf
+ *
+ * @version $Id$
+ * 
+ */
+public class MilleBinary
+{
+    FileChannel _channel;
+    List<Integer> _intBuffer = new ArrayList<Integer>();
+    List<Float> _floatBuffer = new ArrayList<Float>();
+    
+    static String DEFAULT_OUTPUT_FILE_NAME = "millepedeData.bin"; 
+    
+    /**
+     * Default Constructor
+     */
+    public MilleBinary() {
+        try {
+            _channel = new FileOutputStream(DEFAULT_OUTPUT_FILE_NAME).getChannel();
+        } catch (FileNotFoundException ex) {
+            Logger.getLogger(MilleBinary.class.getName()).log(Level.SEVERE, null, ex);
+        }
+        _intBuffer.add(0); // first word is error counter
+        _floatBuffer.add(0f);
+    }
+
+    /**
+     * Fully qualified Constructor
+     * @param outputFileName name of output binary file for millepede II
+     */
+        public MilleBinary(String outputFileName)
+    {
+        try {
+            _channel = new FileOutputStream(outputFileName).getChannel();
+        } catch (FileNotFoundException ex) {
+            Logger.getLogger(MilleBinary.class.getName()).log(Level.SEVERE, null, ex);
+        }
+        _intBuffer.add(0); // first word is error counter
+        _floatBuffer.add(0f);
+    }
+
+    /**
+     * Closes the binary output file
+     */
+    public void close()
+    {
+        try {
+            _channel.close();
+        } catch (IOException ex) {
+            Logger.getLogger(MilleBinary.class.getName()).log(Level.SEVERE, null, ex);
+        }
+    }
+
+    /**
+     * Add data block to (end of) record.
+     * @param aMeas      Value
+     * @param aErr       Error
+     * @param indLocal   List of indices of local parameters
+     * @param derLocal   List of derivatives for local parameters
+     * @param labGlobal  List of labels of global parameters
+     * @param derGlobal  List of derivatives for global parameters
+     */
+        public void addData(float aMeas, float aErr,
+                        List<Integer> indLocal,
+                        List<Double> derLocal,
+                        List<Integer> labGlobal,
+                        List<Double> derGlobal)
+    {
+        _intBuffer.add(0);
+        _floatBuffer.add(aMeas);
+        for (int i = 0; i < indLocal.size(); ++i) {
+            _intBuffer.add(indLocal.get(i));
+            _floatBuffer.add((float) derLocal.get(i).doubleValue());
+        }
+        _intBuffer.add(0);
+        _floatBuffer.add(aErr);
+        for (int i = 0; i < labGlobal.size(); ++i) {
+            if (derGlobal.get(i) != 0) {
+                _intBuffer.add(labGlobal.get(i));
+                _floatBuffer.add((float) derGlobal.get(i).doubleValue());
+            }
+        }
+    }
+
+    /**
+     * Write record to file.
+     */
+        public void writeRecord()
+    {
+        int recordLength = _intBuffer.size() * 2;
+        ByteBuffer b = ByteBuffer.allocate((recordLength + 1) * 2);
+        b.order(ByteOrder.LITTLE_ENDIAN);
+        b.putInt(recordLength);
+        for (Float f : _floatBuffer) {
+            b.putFloat(f);
+        }
+        for (Integer i : _intBuffer) {
+            b.putInt(i);
+        }
+        b.flip();
+        try {
+            _channel.write(b);
+        } catch (IOException ex) {
+            Logger.getLogger(MilleBinary.class.getName()).log(Level.SEVERE, null, ex);
+        }
+        b.clear();
+        _floatBuffer.clear();
+        _intBuffer.clear();
+        _intBuffer.add(0); // first word is error counter
+        _floatBuffer.add(0f);
+    }
 }

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield
StraightTrack.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield/StraightTrack.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield/StraightTrack.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,20 +1,12 @@
 package org.hps.recon.tracking.nobfield;
 
 import hep.physics.matrix.SymmetricMatrix;
-import static java.lang.Math.abs;
-import static java.lang.Math.signum;
 import java.util.ArrayList;
 import java.util.List;
-import org.lcsim.constants.Constants;
 import org.lcsim.event.LCIOParameters;
 import org.lcsim.event.Track;
 import org.lcsim.event.TrackState;
 import org.lcsim.event.TrackerHit;
-import static org.lcsim.event.base.BaseTrack.D0;
-import static org.lcsim.event.base.BaseTrack.OMEGA;
-import static org.lcsim.event.base.BaseTrack.PHI;
-import static org.lcsim.event.base.BaseTrack.TANLAMBDA;
-import static org.lcsim.event.base.BaseTrack.Z0;
 import org.lcsim.event.base.BaseTrackState;
 
 /**
@@ -31,6 +23,9 @@
     protected List<TrackState> _trackStates;
     protected double[] _chi2 = new double[2];
     protected double[] _parameters = new double[5];
+     protected double[] _momentum = new double[3];
+      protected double[] _ref = new double[3];
+    protected int _ndf;
     // Parameter ordering.
     public static final int x0 = LCIOParameters.ParameterName.d0.ordinal();
     public static final int slopeXZ = LCIOParameters.ParameterName.phi0.ordinal();
@@ -139,59 +134,59 @@
 
     @Override
     public int getCharge() {
-        throw new UnsupportedOperationException("StraightTrack...no momentum measured."); //To change body of generated methods, choose Tools | Templates.
+        return -999;
     }
 
     @Override
     public double[] getReferencePoint() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return _ref;
     }
 
     @Override
     public double getReferencePointX() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return _ref[0];
     }
 
     @Override
     public double getReferencePointY() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+    return _ref[1];
     }
 
     @Override
     public double getReferencePointZ() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+         return _ref[2];
     }
 
     @Override
     public boolean isReferencePointPCA() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return false;
     }
 
     @Override
     public double[] getMomentum() {
-        throw new UnsupportedOperationException("StraightTrack...no momentum measured."); //To change body of generated methods, choose Tools | Templates.
+        return _momentum;
     }
 
     @Override
     public double getPX() {
-        throw new UnsupportedOperationException("StraightTrack...no momentum measured."); //To change body of generated methods, choose Tools | Templates.
+        return -999;
     }
 
     @Override
     public double getPY() {
-        throw new UnsupportedOperationException("StraightTrack...no momentum measured."); //To change body of generated methods, choose Tools | Templates.
+        return -999;
     }
 
     @Override
     public double getPZ() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return -999;
     }
 
     @Override
     public boolean fitSuccess() {
         throw new UnsupportedOperationException("StraightTrack...no momentum measured."); //To change body of generated methods, choose Tools | Templates.
     }
-
+    
     @Override
     public double getTrackParameter(int i) {
         return _parameters[i];
@@ -227,24 +222,32 @@
 
     @Override
     public int getNDF() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return _ndf;
     }
 
+    public void setNDF(int ndf) {
+        _ndf = ndf;
+    }
+
     @Override
     public double getdEdx() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return -999;
     }
 
     @Override
     public double getdEdxError() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return -999;
     }
 
     @Override
     public double getRadiusOfInnermostHit() {
-        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
+        return -999;
     }
 
+//    public TrackDirection getTrackDirection(){
+        
+ //   }
+    
     public String toString() {
         String className = getClass().getName();
         int lastDot = className.lastIndexOf('.');

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield
StraightTrackFinder.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield/StraightTrackFinder.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield/StraightTrackFinder.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -3,7 +3,10 @@
 import java.util.ArrayList;
 import java.util.List;
 import org.hps.recon.tracking.HitCollectionUtilites;
+import org.hps.recon.tracking.nobfield.TrackCollectionUtilities;
 import org.lcsim.event.EventHeader;
+import org.lcsim.event.Track;
+import org.lcsim.event.TrackerHit;
 import org.lcsim.fit.helicaltrack.HelicalTrackHit;
 import org.lcsim.fit.line.SlopeInterceptLineFit;
 import org.lcsim.fit.line.SlopeInterceptLineFitter;
@@ -17,7 +20,7 @@
 public class StraightTrackFinder extends Driver {
 
     // Debug flag.
-    private boolean debug = false;
+    private boolean debug = true;
     // Tracks found across all events.
     int ntracks = 0;
     // Number of events processed.
@@ -36,9 +39,10 @@
     private int _iterativeConfirmed = 3;
     // use HPS implementation of material manager
     private boolean _useHPSMaterialManager = true;
-    // enable the use of sectoring using sector binning in SeedTracker
-    private boolean _applySectorBinning = true;
 
+    private TrackChecker checkerTrack = new TrackChecker();
+    private HitOnTrackChecker checkerHOT = new HitOnTrackChecker();
+
     private SlopeInterceptLineFitter _lfitter = new SlopeInterceptLineFitter();
 
     public void setDebug(boolean debug) {
@@ -76,9 +80,6 @@
         this._useHPSMaterialManager = useHPSMaterialManager;
     }
 
-    /**
-     * This is used to setup the Drivers after XML config.
-     */
     @Override
     public void detectorChanged(Detector detector) {
         // Cache Detector object.
@@ -93,68 +94,44 @@
             return;
 
         List<HelicalTrackHit> allHits = event.get(HelicalTrackHit.class, stInputCollectionName);
-        if (allHits.size() == 0)
-            return;
+
         List<List<HelicalTrackHit>> splitTopBot = HitCollectionUtilites.SplitTopBottomHits(allHits);
         // will always have top(=0) and bottom(=1) lists (though they may be empty)
         List<HelicalTrackHit> topHits = splitTopBot.get(0);
         List<HelicalTrackHit> bottomHits = splitTopBot.get(1);
-        //a simple strategy...eventually implement SeedTracker strategies
+        //a simple strategy...eventually implement SeedTracker strategies?
         int nTotLayers = 6;
-        int[] layerStrategy = {1, 3, 5, 7, 9, 11};
+        int nSeed = 3;
+        int nExtra = nTotLayers - nSeed;
+        int[] seedStrategy = {1, 3, 5};
+        int[] extendStrategy = {7, 9, 11};
         int minHits = 4;
 
-        List<StraightTrack> trackList = new ArrayList<>();
-//sort the hits for some reason
-//        List<List<HelicalTrackHit>> sortedTopHits=new ArrayList<>();
-//         List<List<HelicalTrackHit>> sortedBottomHits=new ArrayList<>();
-//        for(int i = 0;i<nTotLayers;i++){
-//            List<HelicalTrackHit> sortedTop=HitCollectionUtilites.GetSortedHits(topHits,layerStrategy[i]);
-//            sortedTopHits.add(sortedTop);
-//            List<HelicalTrackHit> sortedBot=HitCollectionUtilites.GetSortedHits(bottomHits,layerStrategy[i]);
-//            sortedBottomHits.add(sortedBot);                      
-//        }
-//        
-        if (topHits.size() < 4)
-            return;
-        //first do top...
-        for (HelicalTrackHit h1 : HitCollectionUtilites.GetSortedHits(topHits, layerStrategy[0])) {
-            if (debug)
-                System.out.println(h1.toString());
-            for (HelicalTrackHit h2 : HitCollectionUtilites.GetSortedHits(topHits, layerStrategy[1])) {
-                if (debug)
-                    System.out.println(h2.toString());
-                for (HelicalTrackHit h3 : HitCollectionUtilites.GetSortedHits(topHits, layerStrategy[2])) {
-                    if (debug)
-                        System.out.println(h3.toString());
-                    for (HelicalTrackHit h4 : HitCollectionUtilites.GetSortedHits(topHits, layerStrategy[3])) {
-                        if (debug)
-                            System.out.println(h4.toString());
-                        for (HelicalTrackHit h5 : HitCollectionUtilites.GetSortedHits(topHits, layerStrategy[4])) {
-                            if (debug)
-                                System.out.println(h5.toString());
-                            //  Setup for the line fit
-                            List<HelicalTrackHit> testTrack = new ArrayList<HelicalTrackHit>();
-                            testTrack.add(h1);
-                            testTrack.add(h2);
-                            testTrack.add(h3);
-                            testTrack.add(h4);
-                            testTrack.add(h5);
-                            SlopeInterceptLineFit xfit = FitToLine(testTrack, 0);
-                            SlopeInterceptLineFit yfit = FitToLine(testTrack, 1);
-                            if (debug)
-                                System.out.println("xfit = " + xfit.toString());
-                            if (debug)
-                                System.out.println("yfit = " + yfit.toString());
-                            StraightTrack trk = makeTrack(xfit, yfit);
-                            trackList.add(trk);
-                        }
-                    }
-                }
-            }
+        TrackChecker checkerTrack = new TrackChecker();
+        HitOnTrackChecker checkerHOT = new HitOnTrackChecker();
+
+//        List<StraightTrack> seeds = getSeeds(seedStrategy, topHits);
+        List<StraightTrack> seeds = getSeeds(seedStrategy, allHits);
+        System.out.println("Found " + seeds.size() + " seeds");
+
+        List<StraightTrack> extendedSeeds = new ArrayList<>();
+        for (StraightTrack seed : seeds)
+            extendTrack(extendStrategy, 0, seed, allHits, extendedSeeds);
+//            extendTrack(extendStrategy, 0, seed, topHits, extendedSeeds);
+
+        System.out.println("Prepruning  :Found " + extendedSeeds.size() + " extended seeds");
+
+        //remove tracks with more than m overlaping hits...pick best chi2
+        //...
+        List<StraightTrack> finalTracks = new ArrayList<>();
+        for (StraightTrack track : extendedSeeds) {
+            boolean isbest = TrackCollectionUtilities.pruneTrackList((ArrayList<Track>) (ArrayList) extendedSeeds, track, 1);
+            if (isbest)
+                finalTracks.add(track);
         }
 
-        event.put(trackCollectionName, trackList);
+        System.out.println("Postpruning  :Found " + finalTracks.size() + " extended seeds");
+        event.put(trackCollectionName, finalTracks);
     }
 
     public SlopeInterceptLineFit FitToLine(List<HelicalTrackHit> hits, int projection) {
@@ -188,17 +165,108 @@
 
     }
 
-    private StraightTrack makeTrack(SlopeInterceptLineFit xfit, SlopeInterceptLineFit yfit) {
+    private StraightTrack makeTrack(List<HelicalTrackHit> hits, SlopeInterceptLineFit xfit, SlopeInterceptLineFit yfit) {
         StraightTrack track = new StraightTrack();
-        double[] pars = {-99, -99, -99, -99};
+        double[] pars = {-99, -99, -99, -99, -99};//this needs to have 5 fields to implement Track
         pars[0] = xfit.intercept();
         pars[1] = xfit.slope();
         pars[2] = yfit.intercept();
         pars[3] = yfit.slope();
         track.setTrackParameters(pars);
         track.setChi2(xfit.chisquared(), yfit.chisquared());
+        track.setNDF(xfit.ndf()+yfit.ndf());
+        for (TrackerHit hit : hits)
+            track.addHit(hit);        
         // TODO:  set convariance, 
         return track;
     }
 
+    private StraightTrack makeTrack(List<HelicalTrackHit> hits) {
+        SlopeInterceptLineFit xfit = FitToLine(hits, 0);
+        SlopeInterceptLineFit yfit = FitToLine(hits, 1);
+        if (debug)
+            System.out.println("xfit = " + xfit.toString());
+        if (debug)
+            System.out.println("yfit = " + yfit.toString());        
+        return makeTrack(hits, xfit, yfit);
+    }
+
+    /*
+     *   Get all seed combinations that make sense (pass checkSeed)
+     *   currently, just assume there are 3 seed layers (don't have to be first 3 though).
+     */
+    private List<StraightTrack> getSeeds(int[] seedLayers, List<HelicalTrackHit> hits) {
+        List<StraightTrack> seeds = new ArrayList<>();
+        int nseeds = seedLayers.length;
+        if (nseeds == 3)//TODO ... set this up so that this works for arbitrary nseeds...use recursion
+            for (HelicalTrackHit h1 : HitCollectionUtilites.GetSortedHits(hits, seedLayers[0])) {
+                if (debug)
+                    System.out.println(h1.toString());
+                for (HelicalTrackHit h2 : HitCollectionUtilites.GetSortedHits(hits, seedLayers[1])) {
+                    if (debug)
+                        System.out.println(h2.toString());
+                    for (HelicalTrackHit h3 : HitCollectionUtilites.GetSortedHits(hits, seedLayers[2])) {
+                        if (debug)
+                            System.out.println(h3.toString());
+                        //make a 3-hit test track...see if it passes CheckTrack 
+                        List<HelicalTrackHit> testTrack = new ArrayList<HelicalTrackHit>();
+                        testTrack.add(h1);
+                        testTrack.add(h2);
+                        testTrack.add(h3);                       
+                        StraightTrack trk = makeTrack(testTrack);
+                        if (!checkerTrack.checkSeed(trk))
+                            break;
+                        seeds.add(trk);
+                    }
+                }
+            }
+        return seeds;
+    }
+
+    /*
+     * recursively extend the seeds through all of the extend layers..
+     * ...I think this should work...
+     */
+    private void extendTrack(int[] extendLayers, int n, StraightTrack origTrack, List<HelicalTrackHit> hits, List<StraightTrack> trackList) {
+        if (n >= extendLayers.length) {
+            if (debug)
+                System.out.println("Done finding this track through all " + n + " extra layers");
+            trackList.add(origTrack);
+            return;
+        }
+
+        boolean cannotExtendThisLayer = true;
+        if (debug)
+            System.out.println("Extending to layer " + extendLayers[n]);
+        for (HelicalTrackHit h : HitCollectionUtilites.GetSortedHits(hits, extendLayers[n])) {
+            //let's see if this hit makes sense to add to original track
+            if (!checkerHOT.checkNewHit(origTrack, h))
+                continue;
+
+            List<TrackerHit> origHits = origTrack.getTrackerHits();
+            //make a new list and cast them as HelicalTrackHits (Track only stores TrackerHits)
+            List<HelicalTrackHit> newHits = new ArrayList<>();
+            for (TrackerHit oh : origHits) {
+                HelicalTrackHit hoh = (HelicalTrackHit) oh;
+                System.out.println(hoh.getPosition()[0]);
+                newHits.add(hoh);
+            }
+            //add the new hit to the list & make new track
+            newHits.add(h);
+            StraightTrack newTrack = makeTrack(newHits);
+            //check the new track after we've added this hit
+            if (!checkerTrack.checkTrack(newTrack))
+                continue;
+            cannotExtendThisLayer = false;
+            //extend again to the next layer
+            extendTrack(extendLayers, n + 1, newTrack, hits, trackList);
+        }
+
+        //didn't find any hits in this layer that match the track...but let's try the next one
+        if (cannotExtendThisLayer)
+            extendTrack(extendLayers, n + 1, origTrack, hits, trackList);
+
+        return;
+    }
+
 }

java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield
TrackChecker.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield/TrackChecker.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/tracking/src/main/java/org/hps/recon/tracking/nobfield/TrackChecker.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -19,4 +19,9 @@
         return true;
     }
 
+    public boolean checkSeed(StraightTrack trk) {
+
+        return true;
+    }
+
 }

java/branches/hps_java_trunk_HPSJAVA-255/users
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -2,7 +2,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-users</artifactId>
     <name>users</name>
-    <description>user code packages</description>
+    <description>miscellaneous user code</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
@@ -33,6 +33,7 @@
                     <excludes>
                         <exclude>org/hps/users/jeremym/MockDataChallengeDiagnosticDriverTest.java</exclude>
                         <exclude>org/hps/users/ngraf/NearestNeighborClusterDriverTest.java</exclude>
+			<exclude>org/hps/users/holly/ECalClusterICTest.java</exclude>
                     </excludes>
                 </configuration>
             </plugin>

java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/celentan
StripChartTest.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/celentan/StripChartTest.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/celentan/StripChartTest.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,4 +1,4 @@
-package java.org.hps.users.celentan;
+package org.hps.users.celentan;
 
 /**
  * Proof of principle Driver for plotting a sub-system's data using a strip chart.

java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/jeremym
TestRunReconDriver.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/jeremym/TestRunReconDriver.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/jeremym/TestRunReconDriver.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -9,10 +9,10 @@
 import org.hps.recon.tracking.TrackerReconDriver;
 import org.lcsim.event.EventHeader;
 import org.lcsim.geometry.Detector;
+import org.lcsim.recon.tracking.digitization.sisim.config.RawTrackerHitSensorSetup;
+import org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver;
 import org.lcsim.util.Driver;
 
-import org.lcsim.recon.tracking.digitization.sisim.config.ReadoutCleanupDriver;
-
 /**
  * <p>
  * This is a Driver that does the same thing as this steering file:
@@ -31,6 +31,9 @@
         CalibrationDriver calibrationDriver = new CalibrationDriver();
         calibrationDriver.setRunNumber(975);
         this.add(calibrationDriver);
+        
+        RawTrackerHitSensorSetup rawTrackerHitDriver = new RawTrackerHitSensorSetup();
+        this.add(rawTrackerHitDriver);
 
         EcalRawConverterDriver ecalRawConverter = new EcalRawConverterDriver();
         ecalRawConverter.setEcalCollectionName("EcalCalHits");

java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca
FEETrigger.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/FEETrigger.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/FEETrigger.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -88,12 +88,12 @@
 					// Determine in which region the cluster is located
 					// and increment the counter for that region. Zones
 					// are defined as:
-					// Zone 1 is -13 < ix < -4 and 14 < ix < 21
+					// Zone 1 is -13 < ix < -4 and 14 < ix < 21  MISTAKE!!! it's all reversed!! remember!!!
 					// Zone 2 is -20 < ix < -14 and ix > 20
-					// Zone 3 is -23 <= ix < -19
-					if(-23 <= ix && ix < -19) { zone3Count++; }
-					if((-20 < ix && ix < -14) || (ix > 20))  { zone2Count++; }
-					if((-13 < ix && ix < -4) || (14 < ix && ix < 21)) { zone1Count++; }
+					// Zone 3 is -23 <= ix < -18
+					if( ix > 18 || ix < -22) { zone3Count++; }
+					if(ix < 19 && ix  > 12 )  { zone2Count++; }
+					if((ix > 4 && ix < 13) || (ix > -23 && ix < -14)) { zone1Count++; }
 				}
 			}
 		}

java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca
ReconDataPos.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/ReconDataPos.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/ReconDataPos.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -15,9 +15,8 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
-import org.hps.readout.ecal.ClockSingleton;
-import org.hps.readout.ecal.TriggerDriver;
 
+
 import org.hps.recon.ecal.ECalUtils;
 import org.hps.recon.ecal.HPSEcalCluster;
 import org.lcsim.event.Cluster;
@@ -30,6 +29,7 @@
 import java.io.FileWriter;
 import org.lcsim.event.CalorimeterHit;
 import org.lcsim.event.MCParticle;
+import org.lcsim.event.ReconstructedParticle;
 
 /**
  * 
@@ -154,9 +154,34 @@
  public void process (EventHeader event){
    
           
-     
+     //see if it has the reconstructed collection
+     if(event.hasCollection(ReconstructedParticle.class, "FinalStateParticles")){
+ List<ReconstructedParticle> particles = event.get(ReconstructedParticle.class, "FinalStateParticles");
+ 
+ for(ReconstructedParticle particle: particles){
+ 
+     if(particle.getCharge()>0){
+        
+         System.out.println(particle.getEnergy()*particle.getEnergy()-particle.getMomentum().magnitudeSquared()+"\n");
+                   
+       double mass=Math.sqrt(particle.getEnergy()*particle.getEnergy() - particle.getMomentum().magnitudeSquared());
+       List<Cluster> clusters = particle.getClusters();
+      
+      for(Cluster cluster : clusters){
+      
+          int id=getCrystal(cluster);
+          try{
+          writer.append(id + " " + cluster.getEnergy() + " " + cluster.getSize() + " " + HPSEcalCluster.getSeedHit(cluster).getCorrectedEnergy() + " " + HPSEcalCluster.getSeedHit(cluster).getIdentifierFieldValue("ix")+" " +HPSEcalCluster.getSeedHit(cluster).getIdentifierFieldValue("iy")+ "\n");
+          }
+          
+        catch(IOException e ){System.err.println("Error writing to output for event display");} 
+
            
-     
+      }
+      
+     }
+ }
+     }
      //get the clusters from the event
      if(event.hasCollection(Cluster.class, "EcalClusters")) {
         List<Cluster> clusterList =event.get(Cluster.class,clusterCollectionName );    

java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca
TriggerAna.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/TriggerAna.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/TriggerAna.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -42,18 +42,18 @@
     int clusterWindow=50;
     int TotalCluster=0;
     double timeDifference;
-    double energyThreshold=1.5;
+    double energyThreshold=0;
     private LinkedList<ArrayList<HPSEcalCluster>> clusterBuffer;
     protected String clusterCollectionName = "EcalClusters";
     
- AIDA aida = AIDA.defaultInstance();
-IHistogram1D clusterEne=aida.histogram1D("Clusters energy with Kyle's trigger",300, 0, 3);
+ //AIDA aida = AIDA.defaultInstance();
+//IHistogram1D clusterEne=aida.histogram1D("Clusters energy with Kyle's trigger",300, 0, 3);
     private FileWriter writer;
-    private FileWriter writer2;
+   // private FileWriter writer2;
     private FileWriter writer3;
     private FileWriter writer4;
     String outputFileName = "KyleTriggerFEE.txt";
-    String outputFileName2 = "KyleTriggerHits.txt";
+  //  String outputFileName2 = "KyleTriggerHits.txt";
     String outputFileName3 = "NoTriggerFEE.txt";
    
    
@@ -74,8 +74,8 @@
    public void setOutputFileName(String outputFileName){
 this.outputFileName = outputFileName;
 }
-   public void setOutputFileName2(String outputFileName2){
-this.outputFileName2 = outputFileName2;
+   public void setOutputFileName3(String outputFileName3){
+this.outputFileName3 = outputFileName3;
    }
    public void settimeDifference(double time){
    this.timeDifference=time;
@@ -105,12 +105,12 @@
     try{
     //initialize the writers
     writer=new FileWriter(outputFileName);
-    writer2=new FileWriter(outputFileName2);
+    //writer2=new FileWriter(outputFileName2);
     writer3=new FileWriter(outputFileName3);
     
     //Clear the files
     writer.write("");
-    writer2.write("");
+   // writer2.write("");
     writer3.write("");
     
     
@@ -123,12 +123,12 @@
   
 @Override
 public void endOfData(){
-System.out.println("Ho contato" + TotalCluster + " clusters di cui " + Clustercount + "isolati\n");
+//System.out.println("Ho contato" + TotalCluster + " clusters di cui " + Clustercount + "isolati\n");
     
     try{
 //close the file writer.
     writer.close();
-    writer2.close();
+ //   writer2.close();
     writer3.close();
     
     }
@@ -163,58 +163,11 @@
              
      //put the clusters in the arraylist
      
-     ArrayList<HPSEcalCluster> clusterSet = new ArrayList<HPSEcalCluster>(); 
+     
      for(HPSEcalCluster cluster : clusterList){
-         clusterEne.fill(cluster.getEnergy());
+        // clusterEne.fill(cluster.getEnergy());
          TotalCluster++;
-         clusterSet.add(cluster);
-     }
-     //remove the last event from cluster buffer and add the new one
-     clusterBuffer.removeLast();
-     clusterBuffer.addFirst(clusterSet);
-    //Run the sorting algorithm;
-     ClusterAnalyzer();
-     }
-     
-      //get the hits from the event
-     if(event.hasCollection(CalorimeterHit.class,"EcalCorrectedHits")){
-     List<CalorimeterHit> hits =event.get(CalorimeterHit.class,"EcalCorrectedHits");
-     
-     for(CalorimeterHit hit : hits){
-     int id=getCrystal(hit)-1;
-     
-     try{    writer2.append(id + " " + hit.getRawEnergy()+ "\n");}
-      catch(IOException e ){System.err.println("Error writing to output for event display");} 
-     }
-     
-     }
-     
-    }
-     
-}
-
- 
- /**
-  * For each crystal, looks for clusters that hit that clystar, if it is an isolated cluster, it's put in goodclusterqueue
-  */
- public void ClusterAnalyzer(){
- //get the cluster list at the current time in the buffer
-ArrayList<HPSEcalCluster> currentClusters = clusterBuffer.get(clusterWindow+1);
-
-
- ///cerca i cluster nella posizione che ci interessa poi chiama la funzione che decide se sono "isolati"
-   //System.out.println("Sta partendo il for sulla Queue \n");
- for(int y=-5;y<6;y++){
-     for(int x=-23;x<24;x++){
-      posx=x;
-      posy=y;
-         
-         //ciclo for nel set di currentCluster, ovvero il set nel mezzo del buffer
-    for(HPSEcalCluster cluster : currentClusters){ 
-    if((cluster.getSeedHit().getIdentifierFieldValue("ix")== posx) && (cluster.getSeedHit().getIdentifierFieldValue("iy")==posy )&& (cluster.getEnergy() > energyThreshold)){
-        
-           if(ClusterChecker(cluster)){
-            int id;
+        int id;
             Clustercount++;
            id=getCrystal(cluster);
            try{
@@ -227,60 +180,15 @@
      }
      
    catch(IOException e ){System.err.println("Error writing to output for event display");}   
-           
-           }
-      }
+     }
+   
+     }
      
      
+     
     }
- 
- 
- 
- }
- }
- 
- 
- 
- 
-
- }
- /**
-  * Check if the cluster is isolaterd checking if there are clusters near it in time and in space in the buffer
-  * @param cluster
-  * @return 
-  */
- 
-public boolean ClusterChecker (HPSEcalCluster cluster){
-//System.out.println("Sono nel clustercheck! \n");
-    
-boolean check=true;
-  
-    //ciclo sulle liste del buffer
-loops:
-     for(ArrayList<HPSEcalCluster> currentList : clusterBuffer){
-     //ciclo sui cluster della lista corrente
-         for(HPSEcalCluster currentcluster : currentList){
-           if(currentcluster!= cluster){
-             //if there is a cluster in the buffer that is in the considered radius in a time window lower than expected, the loop is brocken and the analyzed cluster is not good
-         if(!((currentcluster.getSeedHit().getIdentifierFieldValue("ix") < posx-radius || currentcluster.getSeedHit().getIdentifierFieldValue("ix")> posx+radius)&& (currentcluster.getSeedHit().getIdentifierFieldValue("iy")< posy-radius || currentcluster.getSeedHit().getIdentifierFieldValue("iy")> posy+radius))&& Math.abs(cluster.getSeedHit().getTime()-currentcluster.getSeedHit().getTime())<timeDifference){
-         check=false;
-         break loops;
-         }
-           }
-           
-        
-         
-         }
-      
      
-     }
-        
-        
-   
-return check;
-
 }
-      
  
  
  public int getCrystal (HPSEcalCluster cluster){

java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca
mycluster3.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/mycluster3.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/luca/mycluster3.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -42,19 +42,19 @@
     int clusterWindow=50;
     int TotalCluster=0;
     double timeDifference;
-    double energyThreshold=1.5;
+    double energyThreshold=0;
     private LinkedList<ArrayList<HPSEcalCluster>> clusterBuffer;
     protected String clusterCollectionName = "EcalClusters";
     
- AIDA aida = AIDA.defaultInstance();
- IHistogram1D clusterEne=aida.histogram1D("Clusters energy with Luca's trigger",300, 0, 3);
+ //AIDA aida = AIDA.defaultInstance();
+// IHistogram1D clusterEne=aida.histogram1D("Clusters energy with Luca's trigger",300, 0, 3);
 // ArrayList<IHistogram1D> SeedHistograms = new ArrayList<IHistogram1D>(442);
   //  ArrayList<IHistogram1D> ClustHistograms = new ArrayList<IHistogram1D>(442);
  //    ArrayList<IHistogram1D> HitHistograms = new ArrayList<IHistogram1D>(442);
     private FileWriter writer;
-    private FileWriter writer2;
+  //  private FileWriter writer2;
     String outputFileName = "LucaTriggerFEE.txt";
-    String outputFileName2 = "LucaTriggerHits.txt";
+ //   String outputFileName2 = "LucaTriggerHits.txt";
 
    
  
@@ -74,9 +74,9 @@
    public void setOutputFileName(String outputFileName){
 this.outputFileName = outputFileName;
 }
-   public void setOutputFileName2(String outputFileName2){
-this.outputFileName2 = outputFileName2;
-   }
+///   public void setOutputFileName2(String outputFileName2){
+//this.outputFileName2 = outputFileName2;
+   //}
    public void settimeDifference(double time){
    this.timeDifference=time;
    
@@ -105,10 +105,10 @@
     try{
     //initialize the writers
     writer=new FileWriter(outputFileName);
-    writer2=new FileWriter(outputFileName2);
+   // writer2=new FileWriter(outputFileName2);
     //Clear the files
     writer.write("");
-    writer2.write("");
+   // writer2.write("");
     
      //initialize histograms  
   /*  for(int t=0; t<442; t++){
@@ -138,7 +138,7 @@
     try{
 //close the file writer.
     writer.close();
-    writer2.close();
+    //writer2.close();
     }
 catch(IOException e){
     System.err.println("Error closing utput file for event display.");
@@ -153,39 +153,35 @@
            
      
      //get the clusters from the event
-    if(TriggerDriver.triggerBit()){ //if they have triggered!
+   // if(TriggerDriver.triggerBit()){ //if they have triggered!
+      
      if(event.hasCollection(HPSEcalCluster.class, "EcalClusters")) {
+         
         List<HPSEcalCluster> clusterList =event.get(HPSEcalCluster.class,clusterCollectionName );    
              
      //put the clusters in the arraylist
      
      ArrayList<HPSEcalCluster> clusterSet = new ArrayList<HPSEcalCluster>(); 
      for(HPSEcalCluster cluster : clusterList){
-         clusterEne.fill(cluster.getEnergy());
+      //   clusterEne.fill(cluster.getEnergy());
          TotalCluster++;
          clusterSet.add(cluster);
+     
+    
+     
      }
      //remove the last event from cluster buffer and add the new one
      clusterBuffer.removeLast();
      clusterBuffer.addFirst(clusterSet);
     //Run the sorting algorithm;
      ClusterAnalyzer();
-     }
      
-      //get the hits from the event
-     if(event.hasCollection(CalorimeterHit.class,"EcalCorrectedHits")){
-     List<CalorimeterHit> hits =event.get(CalorimeterHit.class,"EcalCorrectedHits");
-     
-        for(CalorimeterHit hit : hits){
-            int id=getCrystal(hit)-1;
-          //  HitHistograms.get(id).fill(hit.getRawEnergy());
-                try{    writer2.append(id + " " + hit.getRawEnergy()+ "\n");}
-                catch(IOException e ){System.err.println("Error writing to output for event display");} 
-        }//end of for cycle
      }
      
-    }
+      
      
+   // }// questa parentesi va scommentata se si scommenta l'if del trigger
+//     
 }
 
  

java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/sfegan
HPSECalTestFegan.java 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/sfegan/HPSECalTestFegan.java	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/users/src/main/java/org/hps/users/sfegan/HPSECalTestFegan.java	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,4 +1,4 @@
-package main.java.org.hps.users.sfegan;
+package org.hps.users.sfegan;
 
 
 /**

java/branches/hps_java_trunk_HPSJAVA-255/util
pom.xml 1243 -> 1244
--- java/branches/hps_java_trunk_HPSJAVA-255/util/pom.xml	2014-10-18 00:25:29 UTC (rev 1243)
+++ java/branches/hps_java_trunk_HPSJAVA-255/util/pom.xml	2014-10-19 06:41:14 UTC (rev 1244)
@@ -1,34 +1,24 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    
     <modelVersion>4.0.0</modelVersion>
     <artifactId>hps-util</artifactId>
     <name>util</name>
-    <description>miscellaneous utility classes</description>
-    
+    <description>various utility classes</description>
     <parent>
         <groupId>org.hps</groupId>
         <artifactId>hps-parent</artifactId>
         <relativePath>../parent/pom.xml</relativePath>
         <version>3.0.3-SNAPSHOT</version>
     </parent>
-    
     <scm>
         <url>http://java.freehep.org/svn/repos/hps/list/java/trunk/util/</url>
         <connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/util/</connection>
         <developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/util/</developerConnection>
     </scm>
-
     <dependencies>
         <dependency>
-            <groupId>org.lcsim</groupId>
-            <artifactId>lcsim-distribution</artifactId>
-            <version>${lcsimVersion}</version>
-        </dependency>
-        <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-math3</artifactId>
             <version>3.2</version>
         </dependency>
     </dependencies>
-    
 </project>
SVNspam 0.1