HPS-SVN Archives, January 2015
HPS-SVN@LISTSERV.SLAC.STANFORD.EDU

Subject: r1936 - in /java/trunk/ecal-recon/src: main/java/org/hps/recon/ecal/cluster/ test/java/org/hps/recon/ test/java/org/hps/recon/ecal/cluster/
From: [log in to unmask]
Reply-To: Notification of commits to the hps svn repository <[log in to unmask]>
Date: Thu, 15 Jan 2015 18:28:52 -0000
Content-Type: text/plain
Parts/Attachments: text/plain (1159 lines)

Author: [log in to unmask]
Date: Thu Jan 15 10:28:42 2015
New Revision: 1936

Log:
Additional work and improvements on new cluster package.

Added:
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CTPClusterDriver.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterDriver.java
Removed:
    java/trunk/ecal-recon/src/test/java/org/hps/recon/ECalClusterICTest.java
Modified:
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/DefaultClusterPropertyCalculator.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterDriver.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java
    java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterer.java
    java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java

Added: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CTPClusterDriver.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CTPClusterDriver.java	(added)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CTPClusterDriver.java	Thu Jan 15 10:28:42 2015
@@ -0,0 +1,21 @@
+package org.hps.recon.ecal.cluster;
+
+/**
+ * This is a Driver to wrap the CTPClusterer algorithm,
+ * allowing the <code>clusterWindow</code> cut to be
+ * set publicly.
+ *
+ * @see CTPClusterer
+ * 
+ * @author Jeremy McCormick <[log in to unmask]>
+ */
+public class CTPClusterDriver extends ClusterDriver {
+    
+    public CTPClusterDriver() {
+        clusterer = ClustererFactory.create("CTPClusterer");
+    }
+    
+    public void setClusterWindow(int clusterWindow) {
+        getClusterer().getCuts().setValue("clusterWindow", clusterWindow);
+    }
+}
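
For orientation only, here is a hypothetical configuration sketch for the new driver (not part of the commit). It assumes the collection-name setters used on ClusterDriver in ClustererTest further below, and the variable names and the clusterWindow value are purely illustrative:

    CTPClusterDriver ctpDriver = new CTPClusterDriver();
    ctpDriver.setInputHitCollectionName("EcalCalHits");        // ClusterDriver's default input collection
    ctpDriver.setOutputClusterCollectionName("EcalClusters");  // ClusterDriver's default output collection
    ctpDriver.setClusterWindow(1);                             // illustrative value for the "clusterWindow" cut
    // The configured driver would then be added to the event processing loop, as done in ClustererTest.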

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java	Thu Jan 15 10:28:42 2015
@@ -32,21 +32,23 @@
 public class ClusterDriver extends Driver {
     
     protected static Logger logger = LogUtil.create(ClusterDriver.class, new BasicFormatter(ClusterDriver.class.getSimpleName()));
+    
     protected String ecalName = "Ecal";    
     protected HPSEcal3 ecal;
     protected NeighborMap neighborMap;
     protected String outputClusterCollectionName = "EcalClusters";
     protected String inputHitCollectionName = "EcalCalHits";
     protected Clusterer clusterer;
+    protected double[] cuts;
+    
     protected boolean createEmptyClusterCollection = true;
     protected boolean raiseErrorNoHitCollection = false;
     protected boolean skipNoClusterEvents = false;
     protected boolean writeClusterCollection = true;
     protected boolean storeHits = true;
-    protected double[] cuts;
-    protected boolean sortHits;
-    protected boolean calculateProperties;
-    protected boolean applyCorrections;
+    protected boolean calculateProperties = false;
+    protected boolean applyCorrections = false;
+    protected boolean sortHits = false;
     
     /**
      * No argument constructor.
@@ -216,7 +218,7 @@
     /**
      * This method implements the default clustering procedure based on input parameters.
      */
-    public void process(EventHeader event) {
+    public void process(EventHeader event) {               
         if (event.hasCollection(CalorimeterHit.class, inputHitCollectionName)) {       
             List<CalorimeterHit> hits = event.get(CalorimeterHit.class, inputHitCollectionName);
             logger.fine("input hit collection " + inputHitCollectionName + " has " + hits.size() + " hits");
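
Note that calculateProperties, applyCorrections, and sortHits are off by default (now stated explicitly above). As a hypothetical sketch, a job wanting those steps would enable them through the setters exercised in ClustererTest later in this commit (clusterDriver stands for any ClusterDriver instance):

    clusterDriver.setSortHits(true);              // sort input hits before clustering
    clusterDriver.setCalculateProperties(true);   // run the cluster property calculator
    clusterDriver.setApplyCorrections(true);      // apply HPS energy/position corrections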

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java	Thu Jan 15 10:28:42 2015
@@ -3,6 +3,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -14,6 +15,7 @@
 import org.lcsim.event.MCParticle;
 import org.lcsim.event.SimCalorimeterHit;
 import org.lcsim.event.base.BaseCluster;
+import org.lcsim.event.base.ClusterPropertyCalculator;
 import org.lcsim.geometry.subdetector.HPSEcal3;
 
 /**
@@ -330,10 +332,7 @@
     
     /**
      * Apply HPS-specific energy and position corrections to a list of clusters in place.
-     * 
-     * @see DefaultClusterPropertyCalculator
-     * @see ClusterPositionCorrection
-     * @see ClusterEnergyCorrection
+     * @param clusters The list of clusters.
      */
     public static void applyCorrections(List<Cluster> clusters) {
                 
@@ -355,10 +354,7 @@
     
     /**
      * Apply HPS-specific energy and position corrections to a cluster.
-     * 
-     * @see DefaultClusterPropertyCalculator
-     * @see ClusterPositionCorrection
-     * @see ClusterEnergyCorrection
+     * @param cluster The input cluster.
      */
     public static void applyCorrections(Cluster cluster) {
                             
@@ -376,11 +372,20 @@
     
     /**
      * Call {@link org.lcsim.event.base.BaseCluster#calculateProperties()}
-     * on all clusters in the list.
+     * on all clusters in the list using the default calculator.
      * @param clusters The list of clusters.
      */
     public static void calculateProperties(List<Cluster> clusters) {
-        DefaultClusterPropertyCalculator calc = new DefaultClusterPropertyCalculator();
+        calculateProperties(clusters, new DefaultClusterPropertyCalculator());
+    }    
+    
+    /**
+     * Call {@link org.lcsim.event.base.BaseCluster#calculateProperties()}
+     * on all clusters in the list using the given calculator.
+     * @param clusters The list of clusters.
+     * @param calc The property calculator.
+     */
+    public static void calculateProperties(List<Cluster> clusters, ClusterPropertyCalculator calc) {
         for (Cluster cluster : clusters) {
             if (cluster instanceof BaseCluster) {
                 BaseCluster baseCluster = (BaseCluster)cluster;
@@ -389,4 +394,39 @@
             }
         }
     }    
+    
+    /**
+     * Create a map between a particle and its list of hits from a Cluster.
+     * @param cluster The input cluster.
+     * @return A map between a particle and its hits.
+     */
+    public static Map<MCParticle, List<SimCalorimeterHit>> createParticleHitMap(Cluster cluster) {
+        Map<MCParticle, List<SimCalorimeterHit>> particleHitMap = new HashMap<MCParticle, List<SimCalorimeterHit>>();
+        for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
+            if (hit instanceof SimCalorimeterHit) {
+                SimCalorimeterHit simHit = (SimCalorimeterHit) hit;
+                for (int i = 0; i < simHit.getMCParticleCount(); i++) {
+                    MCParticle particle = simHit.getMCParticle(i);
+                    if (particleHitMap.get(particle) == null) {
+                        particleHitMap.put(particle, new ArrayList<SimCalorimeterHit>());
+                    }
+                    particleHitMap.get(particle).add(simHit);
+                }
+            }
+        }
+        return particleHitMap;
+    }
+    
+    /**
+     * Get the set of hits from a list of clusters.
+     * @param clusters The input cluster list.
+     * @return The set of hits from all the clusters.
+     */
+    public static Set<CalorimeterHit> getHits(List<Cluster> clusters) {
+        Set<CalorimeterHit> hits = new HashSet<CalorimeterHit>();
+        for (Cluster cluster : clusters) {
+            hits.addAll(cluster.getCalorimeterHits());
+        }
+        return hits;
+    }
 }
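
A short, hypothetical usage sketch of the helpers added to ClusterUtilities in this revision, using the same types the file imports above (cluster and clusters stand for an existing Cluster and List<Cluster>):

    // Map each contributing MCParticle to the sim hits it produced in this cluster.
    Map<MCParticle, List<SimCalorimeterHit>> particleHits = ClusterUtilities.createParticleHitMap(cluster);

    // Collect the unique set of hits across a whole cluster collection.
    Set<CalorimeterHit> allHits = ClusterUtilities.getHits(clusters);

    // Recompute properties with an explicit calculator (the one-argument overload uses the default).
    ClusterUtilities.calculateProperties(clusters, new DefaultClusterPropertyCalculator());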

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/DefaultClusterPropertyCalculator.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/DefaultClusterPropertyCalculator.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/DefaultClusterPropertyCalculator.java	Thu Jan 15 10:28:42 2015
@@ -47,6 +47,8 @@
      * {@link org.lcsim.event.base.TensorClusterPropertyCalculator#calculateProperties(List)}.
      */    
     public void calculateProperties(Cluster cluster) {
+        
+        reset();
                 
         calculateMaxDistance(cluster.getEnergy(), cluster.getParticleId() == 11 || cluster.getParticleId() == 0);
         

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterDriver.java	Thu Jan 15 10:28:42 2015
@@ -28,4 +28,8 @@
         GTPClusterer gtpClusterer = getClusterer();
         gtpClusterer.setLimitClusterRange(limitClusterRange);
     }        
+    
+    public void setClusterWindow(int clusterWindow) {
+        getClusterer().getCuts().setValue("clusterWindow", clusterWindow);
+    }
 }

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterDriver.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterDriver.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterDriver.java	Thu Jan 15 10:28:42 2015
@@ -42,27 +42,28 @@
     }
     
     public void setHitEnergyThreshold(double hitEnergyThreshold) {
-        getReconClusterer().getCuts().setValue("hitEnergyThreshold", hitEnergyThreshold);
+        getClusterer().getCuts().setValue("hitEnergyThreshold", hitEnergyThreshold);
     }
     
     public void setSeedEnergyThreshold(double seedEnergyThreshold) {
-        getReconClusterer().getCuts().setValue("seedEnergyThreshold", seedEnergyThreshold);
+        getClusterer().getCuts().setValue("seedEnergyThreshold", seedEnergyThreshold);
     }
     
     public void setClusterEnergyThreshold(double clusterEnergyThreshold) {
-        getReconClusterer().getCuts().setValue("clusterEnergyThreshold", clusterEnergyThreshold);
+        getClusterer().getCuts().setValue("clusterEnergyThreshold", clusterEnergyThreshold);
     }
     
     public void setMinTime(double minTime) {
-        getReconClusterer().getCuts().setValue("minTime", minTime);
+        getClusterer().getCuts().setValue("minTime", minTime);
     }
     
     public void setTimeWindow(double timeWindow) {
-        getReconClusterer().getCuts().setValue("timeWindow", timeWindow);
+        getClusterer().getCuts().setValue("timeWindow", timeWindow);
     }
     
     public void setUseTimeCut(boolean useTimeCut) {
-        getReconClusterer().setUseTimeCut(useTimeCut);
+        ReconClusterer clusterer = getClusterer();
+        clusterer.setUseTimeCut(useTimeCut);
     }    
         
     /**
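
For reference, a hypothetical configuration of the cuts that the setters above route through getClusterer().getCuts(); the numbers are simply the ReconClusterer defaults from the next file, repeated here for illustration, and the units are assumed to be GeV and ns:

    ReconClusterDriver reconDriver = new ReconClusterDriver();  // assumes the existing no-argument constructor
    reconDriver.setHitEnergyThreshold(0.0075);   // assumed GeV
    reconDriver.setSeedEnergyThreshold(0.1);     // assumed GeV
    reconDriver.setClusterEnergyThreshold(0.3);  // assumed GeV
    reconDriver.setMinTime(0.0);                 // assumed ns
    reconDriver.setTimeWindow(20.0);             // assumed ns
    reconDriver.setUseTimeCut(true);             // illustrative; the time cut is off unless requested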

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ReconClusterer.java	Thu Jan 15 10:28:42 2015
@@ -6,16 +6,11 @@
 
 import java.awt.Point;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.hps.recon.ecal.cluster.AbstractClusterer;
-import org.hps.recon.ecal.cluster.ClusterType;
-import org.hps.recon.ecal.cluster.Clusterer;
 import org.lcsim.detector.IGeometryInfo;
 import org.lcsim.detector.solids.Trd;
 import org.lcsim.event.CalorimeterHit;
@@ -27,23 +22,25 @@
  * <p>
  * This class creates clusters from a CalorimeterHit input collection.
  * <p>
- * This clustering logic is based on that from the 
- * <a href="https://misportal.jlab.org/ul/Physics/Hall-B/clas/viewFile.cfm/2005-001.pdf?documentId=6">CLAS-Note-2005-001</a>.
+ * This clustering logic is based on that from the <a
+ * href="https://misportal.jlab.org/ul/Physics/Hall-B/clas/viewFile.cfm/2005-001.pdf?documentId=6"
+ * >CLAS-Note-2005-001</a>.
  * <p>
- * The analysis and position corrections are described in 
- * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>.
+ * The analysis and position corrections are described in <a
+ * href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note
+ * 2014-001</a>.
  * <p>
- * The algorithm sorts hits from highest to lowest energy and builds clusters around each local maximum/seed hit. 
- * Common hits are distributed between clusters when minimum between two clusters. There is a threshold cut for 
- * minimum hit energy, minimum cluster energy, and minimum seed hit energy. There is also a timing threshold with 
- * respect to the seed hit. All of these parameters are tunable and should be refined with more analysis. Energy 
- * corrections are applied separately.
+ * The algorithm sorts hits from highest to lowest energy and builds clusters around each local
+ * maximum/seed hit. Common hits are distributed between clusters when a hit lies at a minimum between two clusters.
+ * There is a threshold cut for minimum hit energy, minimum cluster energy, and minimum seed hit
+ * energy. There is also a timing threshold with respect to the seed hit. All of these parameters
+ * are tunable and should be refined with more analysis. Energy corrections are applied separately.
  * 
  * @see AbstractClusterer
  * @see Clusterer
  * 
  * @author Holly Szumila-Vance <[log in to unmask]>
- * @author Kyle McCarty <[log in to unmask]> 
+ * @author Kyle McCarty <[log in to unmask]>
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class ReconClusterer extends AbstractClusterer {
@@ -78,7 +75,7 @@
         super(new String[] { "hitEnergyThreshold", "seedEnergyThreshold", "clusterEnergyThreshold", "minTime", "timeWindow" }, 
                 new double[] { 0.0075, 0.1, 0.3, 0.0, 20.0 });
     }
-    
+
     void setUseTimeCut(boolean useTimeCut) {
         this.useTimeCut = useTimeCut;
     }
@@ -91,9 +88,9 @@
         }
         clusterEnergyThreshold = getCuts().getValue("clusterEnergyThreshold");
         minTime = getCuts().getValue("minTime");
-        timeWindow = getCuts().getValue("timeWindow");      
-    }
-    
+        timeWindow = getCuts().getValue("timeWindow");
+    }
+
     /**
      * Get the list of rejected hits that was made from processing the last event.
      * @return The list of rejected hit.
@@ -101,12 +98,15 @@
     List<CalorimeterHit> getRejectedHitList() {
         return this.rejectedHitList;
     }
-    
-   
-    public List<Cluster> createClusters(EventHeader event, List<CalorimeterHit> hits) {	
+
+    public List<Cluster> createClusters(EventHeader event, List<CalorimeterHit> hits) {
+
+        // I am pretty sure this map must be cleared between events. --JM
+        correctedPositionMap.clear();
+
         // Create a list to store the event hits in.
         List<CalorimeterHit> hitList = hits;
-        
+
         // Create a list to store the newly created clusters in.
         ArrayList<Cluster> clusterList = new ArrayList<Cluster>();
 
@@ -116,7 +116,7 @@
 
         // Sort the list of hits by energy.
         ClusterUtilities.sortHitsUniqueEnergy(hitList);
-        
+
         // Filter the hit list of any hits that fail to pass the
         // designated threshold.
         for (int index = hitList.size() - 1; index >= 0; index--) {
@@ -137,13 +137,10 @@
 
         // Create a map to connect the cell ID of a calorimeter crystal to the hit which occurred in
         // that crystal.
-        HashMap<Long, CalorimeterHit> hitMap = new HashMap<Long, CalorimeterHit>();
-        for (CalorimeterHit hit : hitList) {
-            hitMap.put(hit.getCellID(), hit);
-        }
-        
+        Map<Long, CalorimeterHit> hitMap = ClusterUtilities.createHitMap(hitList);
+
         // Create a map to connect a seed hit to its cluster.
-        Map<CalorimeterHit, BaseCluster> seedToCluster = new HashMap<CalorimeterHit,BaseCluster>();
+        Map<CalorimeterHit, BaseCluster> seedToCluster = new HashMap<CalorimeterHit, BaseCluster>();
 
         // Map a crystal to a list of all clusters in which it is a member.
         Map<CalorimeterHit, List<CalorimeterHit>> commonHits = new HashMap<CalorimeterHit, List<CalorimeterHit>>();
@@ -152,10 +149,10 @@
         HashMap<CalorimeterHit, CalorimeterHit> hitToSeed = new HashMap<CalorimeterHit, CalorimeterHit>();
 
         // Loop through all calorimeter hits to locate seeds and perform
-        // first pass calculations for component and common hits. 
+        // first pass calculations for component and common hits.
         for (int ii = 0; ii <= hitList.size() - 1; ii++) {
             CalorimeterHit hit = hitList.get(ii);
-           
+
             // Get the set of all neighboring crystals to the current hit.
             Set<Long> neighbors = neighborMap.get(hit.getCellID());
 
@@ -187,34 +184,33 @@
                     break seedHitLoop;
                 }
             }
-     
+
             // Hit is a local maximum
-            if (isSeed){
-            	// Seed must pass minimum threshold
-            	if (hit.getCorrectedEnergy() >= seedEnergyThreshold)
-            	{
-            		// Create new cluster
+            if (isSeed) {
+                // Seed must pass minimum threshold
+                if (hit.getCorrectedEnergy() >= seedEnergyThreshold) {
+                    // Create new cluster
                     BaseCluster cluster = createBasicCluster();
                     clusterList.add(cluster);
                     seedToCluster.put(hit, cluster);
                     hitToSeed.put(hit, hit);
-            		
-            	}
-            	// Seed does not pass minimum threshold
-            	else{
+
+                }
+                // Seed does not pass minimum threshold
+                else {
                     rejectedHitList.add(hit);
-            		hitList.remove(ii);
+                    hitList.remove(ii);
                     ii--;
-            	}            
+                }
             }// end if isSeed
 
             // If this hit is not a seed hit, see if it should be
             // attached to any neighboring seed hits.
-            else {     	
+            else {
                 // Sort through the list of neighboring hits.
                 for (CalorimeterHit neighborHit : neighborHits) {
                     // Check whether the neighboring hit is a seed.
-                  //  if (seedToCluster.containsKey(neighborHit)) {
+                    // if (seedToCluster.containsKey(neighborHit)) {
                     if (hitToSeed.get(neighborHit) == neighborHit) {
 
                         // If the neighboring hit is a seed hit and the
@@ -285,10 +281,12 @@
                 // hit with the current secondary hit's seed.
                 if (!equalEnergies(secondaryNeighborHit, secondaryHit)) {
                     hitToSeed.put(secondaryNeighborHit, hitToSeed.get(secondaryHit));
-                } else {continue;}
+                } else {
+                    continue;
+                }
             }
         } // End component hits loop.
-        
+
         // Performs second pass calculations for common hits.
         commonHitsLoop: for (CalorimeterHit clusteredHit : hitToSeed.keySet()) {
 
@@ -346,63 +344,58 @@
                 }
             }
         } // End common hits loop.
-        
+
         // Remove any common hits from the clustered hits collection.
         for (CalorimeterHit commonHit : commonHits.keySet()) {
             hitToSeed.remove(commonHit);
             hitList.remove(commonHit);
         }
 
-        
         /*
-         * All hits are sorted from above. The next part of the code is for 
-         * building the output cluster collections.
+         * All hits are sorted from above. The next part of the code is for building the output
+         * cluster collections.
          */
-        	// Add all hits except for common hits
-        	for (CalorimeterHit ihit : hitList)
-        	{
-        		CalorimeterHit iseed = hitToSeed.get(ihit);
-        		BaseCluster icluster = seedToCluster.get(iseed);
-        		icluster.addHit(ihit);
-        	}
-        
-        	// Add common hits
-        	for (Map.Entry<CalorimeterHit, List<CalorimeterHit>> commHit : commonHits.entrySet())
-        	{
-        		CalorimeterHit seedA = commHit.getValue().get(0);
-        		CalorimeterHit seedB = commHit.getValue().get(1);
-        		double eclusterA = seedToCluster.get(seedA).getEnergy();
-        		double eclusterB = seedToCluster.get(seedB).getEnergy();       	
-        		double fractionA = eclusterA / (eclusterA + eclusterB);
-        		double fractionB = eclusterB / (eclusterA + eclusterB);        	
-        		double hitcontributionA = commHit.getKey().getCorrectedEnergy()*fractionA;
-        		double hitcontributionB = commHit.getKey().getCorrectedEnergy()*fractionB;
-        	
-        		BaseCluster clusterA = seedToCluster.get(seedA);
-        		BaseCluster clusterB = seedToCluster.get(seedB);
-
-        		clusterA.addHit(commHit.getKey(), hitcontributionA);
-        		clusterB.addHit(commHit.getKey(), hitcontributionB);
-        	}
-    
-        	// Remove clusters that do not pass cluster threshold and add to rejectedHitList.
-        	for (int j=0; j <= clusterList.size()-1; j++)
-        		{	
-        		Cluster checkcluster = clusterList.get(j);        	
-        		if (checkcluster.getEnergy() < clusterEnergyThreshold) {
-        			List<CalorimeterHit> clusterHits= checkcluster.getCalorimeterHits();
-        			for (CalorimeterHit nhit : clusterHits){
-        				rejectedHitList.add(nhit);
-        			}
-        			clusterList.remove(checkcluster);
-        			j--;
-        		} 
-        		else {    
-        			// Computer the position of the cluster and set it.
-        			calculatePosition(checkcluster);
-        			continue;
-        		}
-        	}
+        // Add all hits except for common hits
+        for (CalorimeterHit ihit : hitList) {
+            CalorimeterHit iseed = hitToSeed.get(ihit);
+            BaseCluster icluster = seedToCluster.get(iseed);
+            icluster.addHit(ihit);
+        }
+
+        // Add common hits
+        for (Map.Entry<CalorimeterHit, List<CalorimeterHit>> commHit : commonHits.entrySet()) {
+            CalorimeterHit seedA = commHit.getValue().get(0);
+            CalorimeterHit seedB = commHit.getValue().get(1);
+            double eclusterA = seedToCluster.get(seedA).getEnergy();
+            double eclusterB = seedToCluster.get(seedB).getEnergy();
+            double fractionA = eclusterA / (eclusterA + eclusterB);
+            double fractionB = eclusterB / (eclusterA + eclusterB);
+            double hitcontributionA = commHit.getKey().getCorrectedEnergy() * fractionA;
+            double hitcontributionB = commHit.getKey().getCorrectedEnergy() * fractionB;
+
+            BaseCluster clusterA = seedToCluster.get(seedA);
+            BaseCluster clusterB = seedToCluster.get(seedB);
+
+            clusterA.addHit(commHit.getKey(), hitcontributionA);
+            clusterB.addHit(commHit.getKey(), hitcontributionB);
+        }
+
+        // Remove clusters that do not pass cluster threshold and add to rejectedHitList.
+        for (int j = 0; j <= clusterList.size() - 1; j++) {
+            Cluster checkcluster = clusterList.get(j);
+            if (checkcluster.getEnergy() < clusterEnergyThreshold) {
+                List<CalorimeterHit> clusterHits = checkcluster.getCalorimeterHits();
+                for (CalorimeterHit nhit : clusterHits) {
+                    rejectedHitList.add(nhit);
+                }
+                clusterList.remove(checkcluster);
+                j--;
+            } else {
+                // Compute the position of the cluster and set it.
+                calculatePosition(checkcluster);
+                continue;
+            }
+        }
         return clusterList;
     }
 
@@ -430,44 +423,44 @@
         }
         return isSeed;
     }
-    
+
     /**
-     * Calculates the position of each cluster with no correction for particle type
-     * as documented in HPS Note 2014-001.
+     * Calculates the position of each cluster with no correction for particle type as documented in
+     * HPS Note 2014-001.
      * @param cluster
      */
-    private void calculatePosition(Cluster cluster){
+    private void calculatePosition(Cluster cluster) {
         final double w0 = 3.1;
         // calculated cluster x position
-    	double xCl = 0.0; 
-    	// calculated cluster y position
-        double yCl = 0.0; 
+        double xCl = 0.0;
+        // calculated cluster y position
+        double yCl = 0.0;
         double eNumX = 0.0;
         double eNumY = 0.0;
         double eDen = 0.0;
-        List<CalorimeterHit> clusterHits= cluster.getCalorimeterHits();
-		for (CalorimeterHit hit : clusterHits){
-			// This block fills a map with crystal to center of face of crystal
-	    	// Get the hit indices as a Point.
-	    	int ix = hit.getIdentifierFieldValue("ix");
-	    	int iy = hit.getIdentifierFieldValue("iy");
-	    	Point hitIndex = new Point(ix, iy);
-	    	
-	    	// Get the corrected position for this index pair.
-	    	double[] position = correctedPositionMap.get(hitIndex);
-	    	
-	    	if (position == null){
-	    		addToMap(hit);
-	    		position = correctedPositionMap.get(hitIndex);
-	    	}
-			
+        List<CalorimeterHit> clusterHits = cluster.getCalorimeterHits();
+        for (CalorimeterHit hit : clusterHits) {
+            // This block fills a map with crystal to center of face of crystal
+            // Get the hit indices as a Point.
+            int ix = hit.getIdentifierFieldValue("ix");
+            int iy = hit.getIdentifierFieldValue("iy");
+            Point hitIndex = new Point(ix, iy);
+
+            // Get the corrected position for this index pair.
+            double[] position = correctedPositionMap.get(hitIndex);
+
+            if (position == null) {
+                addToMap(hit);
+                position = correctedPositionMap.get(hitIndex);
+            }
+
             eNumX += Math.max(0.0, (w0 + Math.log(hit.getCorrectedEnergy() / cluster.getEnergy()))) * (correctedPositionMap.get(hitIndex)[0] / 10.0);
             eNumY += Math.max(0.0, (w0 + Math.log(hit.getCorrectedEnergy() / cluster.getEnergy()))) * (correctedPositionMap.get(hitIndex)[1] / 10.0);
             eDen += Math.max(0.0, (w0 + Math.log(hit.getCorrectedEnergy() / cluster.getEnergy())));
-			
-		}// end for iteration through clusterHits
-		
-		xCl = eNumX / eDen;
+
+        }// end for iteration through clusterHits
+
+        xCl = eNumX / eDen;
         yCl = eNumY / eDen;
 
         double[] clusterPosition = new double[3];
@@ -478,32 +471,30 @@
         Point hitIndex = new Point(ix, iy);
         clusterPosition[2] = correctedPositionMap.get(hitIndex)[2];
 
-        ((BaseCluster) cluster).setPosition(clusterPosition);    	    	
-    }
- 
+        ((BaseCluster) cluster).setPosition(clusterPosition);
+    }
+
     /**
-     * This constructs a mapping of a crystal to an x,y position at the face of the ecal.
-     * This fills the correctedPositionMap with the position of a crystal's face from the
-     * index x,y of the crystal.
+     * This constructs a mapping of a crystal to an x,y position at the face of the ecal. This fills
+     * the correctedPositionMap with the position of a crystal's face from the index x,y of the
+     * crystal.
      * @param hit
      */
-    private void addToMap(CalorimeterHit hit){
-    	int ix = hit.getIdentifierFieldValue("ix");
-    	int iy = hit.getIdentifierFieldValue("iy");
-    	Point hitIndex = new Point(ix, iy);
-    	// If the result is null, it hasn't been calculated yet.
-    	// Calculate the corrected position.
-    	IGeometryInfo geom = hit.getDetectorElement().getGeometry();
-    	double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()), (Hep3Vector) new BasicHep3Vector(0, 0, -1 * ((Trd) geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
-
-    	// Store the result in the map.
-    	correctedPositionMap.put(hitIndex, pos);      	
-    }
-    
-    
-    
+    private void addToMap(CalorimeterHit hit) {
+        int ix = hit.getIdentifierFieldValue("ix");
+        int iy = hit.getIdentifierFieldValue("iy");
+        Point hitIndex = new Point(ix, iy);
+        // If the result is null, it hasn't been calculated yet.
+        // Calculate the corrected position.
+        IGeometryInfo geom = hit.getDetectorElement().getGeometry();
+        double[] pos = geom.transformLocalToGlobal(VecOp.add(geom.transformGlobalToLocal(geom.getPosition()), (Hep3Vector) new BasicHep3Vector(0, 0, -1 * ((Trd) geom.getLogicalVolume().getSolid()).getZHalfLength()))).v();
+
+        // Store the result in the map.
+        correctedPositionMap.put(hitIndex, pos);
+    }
+
     public ClusterType getClusterType() {
         return ClusterType.RECON;
     }
-    
+
 }
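
As a reading aid (not part of the commit), the log-weighted centroid computed in calculatePosition above can be written, with w_0 = 3.1, hit energies E_i, cluster energy E, and crystal front-face coordinates (x_i, y_i) from the correctedPositionMap:

    w_i = \max(0,\; w_0 + \ln(E_i / E)), \qquad
    x_{cl} = \frac{\sum_i w_i x_i}{\sum_i w_i}, \qquad
    y_{cl} = \frac{\sum_i w_i y_i}{\sum_i w_i}

The code applies these weights to the map positions divided by 10, and takes the z coordinate directly from a correctedPositionMap entry.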

Added: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterDriver.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterDriver.java	(added)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterDriver.java	Thu Jan 15 10:28:42 2015
@@ -0,0 +1,23 @@
+package org.hps.recon.ecal.cluster;
+
+
+/**
+ * This is an implementation of {@link ClusterDriver} specialized for the
+ * {@link SimpleReconClusterer}.  
+ *  
+ * @see SimpleReconClusterer
+ * 
+ * @author Jeremy McCormick <[log in to unmask]>
+ */
+public class SimpleReconClusterDriver extends ClusterDriver {
+                
+    public SimpleReconClusterDriver() {
+        // Setup the Clusterer with the correct type.
+        clusterer = ClustererFactory.create("SimpleReconClusterer");
+    }
+    
+    public void setUseTimeCut(boolean useTimeCut) {
+        SimpleReconClusterer clusterer = getClusterer();
+        clusterer.setUseTimeCut(useTimeCut);
+    }    
+}

Modified: java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterer.java
 =============================================================================
--- java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterer.java	(original)
+++ java/trunk/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleReconClusterer.java	Thu Jan 15 10:28:42 2015
@@ -31,18 +31,17 @@
     double minEnergy;
     double minTime;
     double timeWindow;
-    boolean timeCut;
+    boolean useTimeCut;
 
     /**
      * Initialize the algorithm with default cuts.
      */
     SimpleReconClusterer() {
-        super(new String[] { "minEnergy", "minTime", "timeWindow", "timeCut" }, new double[] { 0.001, 0.0, 20.0, 0. });
+        super(new String[] { "minEnergy", "minTime", "timeWindow" }, new double[] { 0.001, 0.0, 20.0 });
     }
 
     public void initialize() {
         // Setup class variables from cuts.
-        timeCut = (getCuts().getValue("timeCut") == 1.0);
         minEnergy = getCuts().getValue("minEnergy");
         minTime = getCuts().getValue("minTime");
         timeWindow = getCuts().getValue("timeWindow");
@@ -61,7 +60,7 @@
                 continue;
             }
             // if time cut is being used, reject hits outside the time window
-            if (timeCut && (h.getTime() < minTime || h.getTime() > minTime + timeWindow)) {
+            if (useTimeCut && (h.getTime() < minTime || h.getTime() > minTime + timeWindow)) {
                 continue;
             }
             sortedHitList.add(h);
@@ -129,4 +128,8 @@
     public ClusterType getClusterType() {
         return ClusterType.SIMPLE_RECON;
     }
+    
+    public void setUseTimeCut(boolean useTimeCut) {
+        this.useTimeCut = useTimeCut;
+    }
 }
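
Since the time cut is now a boolean flag rather than a numeric cut, a hypothetical caller would enable it through the new setters added in this commit (the cut defaults quoted in the comment are taken from the constructor above):

    SimpleReconClusterDriver simpleDriver = new SimpleReconClusterDriver();
    simpleDriver.setUseTimeCut(true);   // forwards to SimpleReconClusterer.setUseTimeCut(boolean)
    // "minEnergy", "minTime" and "timeWindow" remain ordinary cuts with defaults 0.001, 0.0 and 20.0.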

Modified: java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java
 =============================================================================
--- java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java	(original)
+++ java/trunk/ecal-recon/src/test/java/org/hps/recon/ecal/cluster/ClustererTest.java	Thu Jan 15 10:28:42 2015
@@ -1,12 +1,15 @@
 package org.hps.recon.ecal.cluster;
 
 import hep.aida.ICloud1D;
+import hep.aida.ICloud2D;
 import hep.aida.IHistogram1D;
 import hep.aida.IHistogram2D;
 
 import java.io.File;
 import java.io.IOException;
 import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Level;
@@ -29,22 +32,28 @@
 
 /**
  * This test does basic sanity checks on the output from the Clusterer algorithms,
- * and it creates an AIDA file with useful plots.
+ * and it creates an AIDA file with some useful plots. It can also optionally write
+ * an LCIO file with the event data plus the clusters.
  * 
  * @author Jeremy McCormick <[log in to unmask]>
  */
 public class ClustererTest extends TestCase {
     
-    static int nEvents = 100;
+    static int nEvents = 1000;
     static final String fileLocation = "http://www.lcsim.org/test/hps-java/MockDataReconTest.slcio";
     File inputFile;
     File testOutputDir;
     
     static class ClustererTestSetup {
         
-        boolean writeLcioFile = false;
-        boolean checkSeedHit = false;
-        boolean applyCorrections = false;        
+        boolean writeLcioFile;
+        boolean checkSeedHit;
+        boolean applyCorrections;        
+        boolean checkHitEnergy;
+        boolean checkPropCalc;
+        boolean checkPosition;
+        boolean calculateProperties;
+        boolean sortHits;
         double[] cuts = null;
         ClusterType clusterType; 
         
@@ -70,8 +79,33 @@
             return this;
         }        
         
+        ClustererTestSetup calculateProperties() {
+            calculateProperties = true;
+            return this;
+        }
+        
+        ClustererTestSetup sortHits() {
+            sortHits = true;
+            return this;
+        }
+        
+        ClustererTestSetup checkHitEnergy() {
+            checkHitEnergy = true;
+            return this;
+        }
+        
+        ClustererTestSetup checkPropCalc() {
+            checkPropCalc = true;
+            return this;
+        }
+        
         ClustererTestSetup checkClusterType(ClusterType clusterType) {
             this.clusterType = clusterType;
+            return this;
+        }
+        
+        ClustererTestSetup checkPosition() {
+            this.checkPosition = true;
             return this;
         }
     }
@@ -98,7 +132,12 @@
      */
     public void testReconClusterer() {        
         runClustererTest("ReconClusterer", 
-                new ClustererTestSetup().writeLcioFile().checkSeedHit().applyCorrections().checkClusterType(ClusterType.RECON));
+                new ClustererTestSetup()
+                    .writeLcioFile()
+                    .checkSeedHit()
+                    .checkClusterType(ClusterType.RECON)
+                    .checkHitEnergy()
+                    .checkPosition());
     }
     
     /**
@@ -106,7 +145,12 @@
      */
     public void testSimpleReconClusterer() {
         runClustererTest("SimpleReconClusterer", 
-                new ClustererTestSetup().writeLcioFile().checkSeedHit().checkClusterType(ClusterType.SIMPLE_RECON));
+                new ClustererTestSetup()
+                    .writeLcioFile()
+                    .checkSeedHit()
+                    .checkClusterType(ClusterType.SIMPLE_RECON)
+                    .checkHitEnergy()
+                    .checkPosition());
     }
     
     /**
@@ -114,7 +158,11 @@
      */
     public void testNearestNeighborClusterer() {    
         runClustererTest("NearestNeighborClusterer", 
-                new ClustererTestSetup().writeLcioFile().checkClusterType(ClusterType.NN));
+                new ClustererTestSetup(new double[] { 0.0075, 3 })
+                    .writeLcioFile()
+                    .checkClusterType(ClusterType.NN)
+                    .checkHitEnergy()
+                    .checkPosition());
     }
     
     /**
@@ -122,7 +170,11 @@
      */
     public void testLegacyClusterer() {
         runClustererTest("LegacyClusterer", 
-                new ClustererTestSetup().writeLcioFile().checkClusterType(ClusterType.LEGACY));
+                new ClustererTestSetup()
+                    .writeLcioFile()
+                    .checkClusterType(ClusterType.LEGACY)
+                    .checkHitEnergy()
+                    .checkPosition());
     }
     
     /**
@@ -130,7 +182,11 @@
      */
     public void testGTPOnlineClusterer() {
         runClustererTest("GTPOnlineClusterer", 
-                new ClustererTestSetup().writeLcioFile().checkSeedHit().checkClusterType(ClusterType.GTP_ONLINE));
+                new ClustererTestSetup()
+                    .writeLcioFile().checkSeedHit()
+                    .checkClusterType(ClusterType.GTP_ONLINE)
+                    .checkHitEnergy()
+                    .checkPosition());
     }
     
     /**
@@ -138,7 +194,10 @@
      */
     public void testCTPClusterer() {
         runClustererTest("CTPClusterer", 
-                new ClustererTestSetup().writeLcioFile().checkClusterType(ClusterType.CTP));
+                new ClustererTestSetup()
+                    .writeLcioFile()
+                    .checkClusterType(ClusterType.CTP)
+                    .checkPosition());
     }
     
     /**
@@ -146,7 +205,11 @@
      */
     public void testGTPClusterer() {
         runClustererTest("GTPClusterer", 
-                new ClustererTestSetup().writeLcioFile().checkSeedHit().checkClusterType(ClusterType.GTP));
+                new ClustererTestSetup()
+                    .writeLcioFile()
+                    .checkClusterType(ClusterType.GTP)
+                    .checkHitEnergy()
+                    .checkPosition());
     }
     
     /**
@@ -184,14 +247,14 @@
         clusterDriver.setInputHitCollectionName("EcalHits");       
         clusterDriver.setOutputClusterCollectionName(clusterCollectionName);
         clusterDriver.setRaiseErrorNoHitCollection(true);
-        clusterDriver.setCalculateProperties(true);
-        clusterDriver.setSortHits(true);
+        clusterDriver.setCalculateProperties(setup.calculateProperties);
+        clusterDriver.setSortHits(setup.sortHits);              
         clusterDriver.setApplyCorrections(setup.applyCorrections);
         clusterDriver.getLogger().setLevel(Level.CONFIG);
         loop.add(clusterDriver);                         
         
         // This Driver generates plots and the output LCIO file.
-        loop.add(new ClusterCheckDriver(clusterCollectionName, setup.checkSeedHit, setup.clusterType));
+        loop.add(new ClusterCheckDriver(clusterCollectionName, setup));
         
         if (setup.writeLcioFile) {
             loop.add(new LCIODriver(testOutputDir.getPath() + File.separator + clustererName + ".slcio"));
@@ -233,30 +296,36 @@
         IHistogram1D shapeParam3H1D;
         ICloud1D ithetaC1D;
         ICloud1D iphiC1D;
+        ICloud1D nParticlesC1D;
+        ICloud1D particleEnergyC1D;
         IHistogram2D particleVsClusterEnergyH2D;
         IHistogram1D particleMinusClusterEnergyH1D;
+        ICloud2D clusterPositionC2D;
+        ICloud2D highestParticleEnergyVsClusterEnergyC2D;
+        ICloud2D clusterVsHitCountC2d;
+        IHistogram1D earliestHitTimeH1D;
+        IHistogram1D latestHitTimeH1D;
         String clusterCollectionName;
-        String clustererName;        
-        boolean checkSeedHit = true;
-        ClusterType clusterType;
-        
-        ClusterCheckDriver(String clusterCollectionName, boolean checkSeedHit, ClusterType clusterType) {
+        String clustererName;
+        
+        ClustererTestSetup setup;
+        
+        ClusterCheckDriver(String clusterCollectionName, ClustererTestSetup setup) {
             this.clusterCollectionName = clusterCollectionName;
-            this.checkSeedHit = checkSeedHit;
-            this.clusterType = clusterType;
+            this.setup = setup;
         }        
         
         public void startOfData() {
             energyH1D = aida.histogram1D(clusterCollectionName + "/Cluster Energy", 300, 0.0, 3.0);
             uncorrectedEnergyH1D = aida.histogram1D(clusterCollectionName + "/Uncorrected Cluster Energy", 200, 0.0, 2.0);
-            countH1D = aida.histogram1D(clusterCollectionName + "/Cluster Count", 10, -0.5, 9.5);
+            countH1D = aida.histogram1D(clusterCollectionName + "/Cluster Count", 20, -0.5, 19.5);
             sizeH1D = aida.histogram1D(clusterCollectionName + "/Cluster Size", 30, 0.5, 30.5);
             highestHitEnergyH1D = aida.histogram1D(clusterCollectionName + "/Highest Hit Energy", 300, 0.0, 1.5);
-            hitEnergyH1D = aida.histogram1D(clusterCollectionName + "/Hit Energy", 300, 0.0, 1.5);
+            hitEnergyH1D = aida.histogram1D(clusterCollectionName + "/Hit Energy", 600, 0.0, 1.5);
             rawVsCorrectedH2D = aida.histogram2D(clusterCollectionName + "/Raw vs Corrected Energy", 100, 0.0, 2.0, 100, 0.0, 2.0);
-            positionXH1D = aida.histogram1D(clusterCollectionName + "/Position X", 300, 0., 1500.0);
-            positionYH1D = aida.histogram1D(clusterCollectionName + "/Position Y", 500, 0., 1000.0);
-            positionZH1D = aida.histogram1D(clusterCollectionName + "/Position Z", 1000, 0, 2000.0);
+            positionXH1D = aida.histogram1D(clusterCollectionName + "/Position X", 500, 0., 500.0);
+            positionYH1D = aida.histogram1D(clusterCollectionName + "/Position Y", 500, 0., 100.0);
+            positionZH1D = aida.histogram1D(clusterCollectionName + "/Position Z", 200, 1400., 1500.0);
             shapeParam1H1D = aida.histogram1D(clusterCollectionName + "/Shape Param 1", 500, -5, 95);
             shapeParam2H1D = aida.histogram1D(clusterCollectionName + "/Shape Param 2", 520, -10, 250);
             shapeParam3H1D = aida.histogram1D(clusterCollectionName + "/Shape Param 3", 520, -10, 250);
@@ -264,47 +333,102 @@
             particleMinusClusterEnergyH1D = aida.histogram1D(clusterCollectionName + "/MCParticle Minus Cluster E", 200, -2.0, 2.0);
             ithetaC1D = aida.cloud1D(clusterCollectionName + "/ITheta");
             iphiC1D = aida.cloud1D(clusterCollectionName + "/IPhi");
+            clusterPositionC2D = aida.cloud2D(clusterCollectionName + "/Position XY", Integer.MAX_VALUE);
+            nParticlesC1D = aida.cloud1D(clusterCollectionName + "/MCParticle Count");
+            particleEnergyC1D = aida.cloud1D(clusterCollectionName + "/MCParticle Total Energy");
+            highestParticleEnergyVsClusterEnergyC2D = aida.cloud2D(clusterCollectionName + "/Highest Particle E vs Cluster E");
+            earliestHitTimeH1D = aida.histogram1D(clusterCollectionName + "/Earliest Hit Time", 500, 0., 500.);
+            latestHitTimeH1D = aida.histogram1D(clusterCollectionName + "/Latest Hit Time", 500, 0., 500.);
+            clusterVsHitCountC2d = aida.cloud2D(clusterCollectionName + "/Cluster Vs Hit Count");
         }
         
         public void process(EventHeader event) {
             List<Cluster> clusters = event.get(Cluster.class, this.clusterCollectionName);
             for (Cluster cluster : clusters) {
-                assertTrue("The cluster energy is invalid.", cluster.getEnergy() > 0.);
-                assertTrue("The cluster has no hits.", !cluster.getCalorimeterHits().isEmpty());
-                if (checkSeedHit) {
-                    assertEquals("First hit is not seed.", cluster.getCalorimeterHits().get(0), ClusterUtilities.findHighestEnergyHit(cluster));
-                }
-                assertTrue("Cluster properties not calculated.", !((BaseCluster)cluster).needsPropertyCalculation());
-                if (clusterType != null) {
-                    assertEquals("Cluster type is not correct.", clusterType, ClusterType.getClusterType(cluster.getType()));
-                }
-                energyH1D.fill(cluster.getEnergy());
-                double rawEnergy = ClusterUtilities.computeRawEnergy(cluster);
-                uncorrectedEnergyH1D.fill(rawEnergy);
-                sizeH1D.fill(cluster.getCalorimeterHits().size());
-                rawVsCorrectedH2D.fill(rawEnergy, cluster.getEnergy());
-                for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
-                    hitEnergyH1D.fill(hit.getCorrectedEnergy());
-                }                
-                highestHitEnergyH1D.fill(ClusterUtilities.findHighestEnergyHit(cluster).getCorrectedEnergy());
-                positionXH1D.fill(Math.abs(cluster.getPosition()[0]));
-                positionYH1D.fill(Math.abs(cluster.getPosition()[1]));
-                positionZH1D.fill(Math.abs(cluster.getPosition()[2]));
-                shapeParam1H1D.fill(cluster.getShape()[0]);
-                shapeParam2H1D.fill(cluster.getShape()[1]);
-                shapeParam3H1D.fill(cluster.getShape()[2]);
-                iphiC1D.fill(Math.toDegrees(cluster.getIPhi()));
-                ithetaC1D.fill(Math.toDegrees(cluster.getITheta()));                                
                 
-                Set<MCParticle> particles = ClusterUtilities.findMCParticles(cluster);
-                double particleEnergy = 0;
-                for (MCParticle particle : particles) {
-                    particleEnergy += particle.getEnergy();
-                }
-                particleVsClusterEnergyH2D.fill(particleEnergy, cluster.getEnergy());
-                particleMinusClusterEnergyH1D.fill(particleEnergy - cluster.getEnergy());
+                // Test assertions.
+                checkCluster(cluster);                
+                
+                // Fill plots.
+                fillClusterPlots(cluster);
             }            
             countH1D.fill(clusters.size());
+            clusterVsHitCountC2d.fill(clusters.size(), ClusterUtilities.getHits(clusters).size());
+        }
+
+        /**
+         * Fill the diagnostic plots for a single cluster.
+         * @param cluster The cluster to plot.
+         */
+        private void fillClusterPlots(Cluster cluster) {
+            energyH1D.fill(cluster.getEnergy());
+            double rawEnergy = ClusterUtilities.computeRawEnergy(cluster);
+            uncorrectedEnergyH1D.fill(rawEnergy);
+            sizeH1D.fill(cluster.getCalorimeterHits().size());
+            rawVsCorrectedH2D.fill(rawEnergy, cluster.getEnergy());
+            for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
+                hitEnergyH1D.fill(hit.getCorrectedEnergy());
+            }                
+            highestHitEnergyH1D.fill(ClusterUtilities.findHighestEnergyHit(cluster).getCorrectedEnergy());
+            positionXH1D.fill(Math.abs(cluster.getPosition()[0]));
+            positionYH1D.fill(Math.abs(cluster.getPosition()[1]));
+            positionZH1D.fill(Math.abs(cluster.getPosition()[2]));
+            shapeParam1H1D.fill(cluster.getShape()[0]);
+            shapeParam2H1D.fill(cluster.getShape()[1]);
+            shapeParam3H1D.fill(cluster.getShape()[2]);
+            iphiC1D.fill(Math.toDegrees(cluster.getIPhi()));
+            ithetaC1D.fill(Math.toDegrees(cluster.getITheta()));                                
+            clusterPositionC2D.fill(cluster.getPosition()[0], cluster.getPosition()[1]);
+            
+            //Map<MCParticle, List<SimCalorimeterHit>> particleHitMap = ClusterUtilities.createParticleHitMap(cluster);
+            
+            Set<MCParticle> particles = ClusterUtilities.findMCParticles(cluster);
+            double particleEnergy = 0;
+            double highestParticleEnergy = Double.MIN_VALUE;
+            for (MCParticle particle : particles) {
+                particleEnergy += particle.getEnergy();
+                if (particle.getEnergy() > highestParticleEnergy) {
+                    highestParticleEnergy = particle.getEnergy();
+                }
+            }
+            particleVsClusterEnergyH2D.fill(particleEnergy, cluster.getEnergy());
+            particleMinusClusterEnergyH1D.fill(particleEnergy - cluster.getEnergy());
+            nParticlesC1D.fill(particles.size());
+            particleEnergyC1D.fill(particleEnergy);
+            highestParticleEnergyVsClusterEnergyC2D.fill(highestParticleEnergy, cluster.getEnergy());
+
+            List<CalorimeterHit> hitsForTime = new ArrayList<CalorimeterHit>();
+            hitsForTime.addAll(cluster.getCalorimeterHits());
+            Collections.sort(hitsForTime, new CalorimeterHit.TimeComparator());
+            earliestHitTimeH1D.fill(hitsForTime.get(0).getTime());
+            latestHitTimeH1D.fill(hitsForTime.get(hitsForTime.size() - 1).getTime());
+        }
+
+        /**
+         * Run the configured assertions on a single cluster.
+         * @param cluster The cluster to check.
+         */
+        private void checkCluster(Cluster cluster) {
+            assertTrue("The cluster energy is invalid.", cluster.getEnergy() > 0.);
+            assertTrue("The cluster has no hits.", !cluster.getCalorimeterHits().isEmpty());
+            if (setup.checkSeedHit) {
+                assertEquals("First hit is not seed.", cluster.getCalorimeterHits().get(0), ClusterUtilities.findHighestEnergyHit(cluster));
+            }
+            if (setup.checkPropCalc) {
+                assertTrue("Cluster properties not calculated.", !((BaseCluster)cluster).needsPropertyCalculation());
+            }
+            if (setup.clusterType != null) {
+                assertEquals("Cluster type is not correct.", setup.clusterType, ClusterType.getClusterType(cluster.getType()));
+            }
+            if (setup.checkHitEnergy) {
+                for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
+                    assertTrue("Hit energy " + hit.getCorrectedEnergy() + " is <= 0.", hit.getCorrectedEnergy() > 0.);
+                }
+            }
+            if (setup.checkPosition) {
+                double[] position = cluster.getPosition();
+                assertTrue("Position X is invalid.", Math.abs(position[0]) < 400. && position[0] != 0.);
+                assertTrue("Position Y is invalid.", Math.abs(position[1]) > 25. && Math.abs(position[1]) < 90.);
+                assertTrue("Position Z is invalid.", position[2] > 1460. && position[2] < 1480.);
+            }
         }              
     }
     
