Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ClusterSharingAlgorithmExcludingTargets.java   +40         added 1.1
ConeClusterSharingAlgorithm.java               +55         added 1.1
MultipleClusterSharingAlgorithm.java           +45         added 1.1
FindSubClusters.java                           +38 -5      1.1 -> 1.2
ReclusterDTreeDriver.java                      +150 -113   1.28 -> 1.29
ReclusterDriver.java                           +180 -42    1.25 -> 1.26
Total                                          +508 -160
3 added + 3 modified, total 6 files
MJC: (contrib) Updates to PFA

lcsim/src/org/lcsim/contrib/uiowa
ClusterSharingAlgorithmExcludingTargets.java added at 1.1
diff -N ClusterSharingAlgorithmExcludingTargets.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ClusterSharingAlgorithmExcludingTargets.java	12 Jul 2008 06:49:52 -0000	1.1
@@ -0,0 +1,40 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.event.*;
+
+/**
+ * Wrapper class that takes another ClusterSharingAlgorithm and applies it to a
+ * subset of the supplied targets. This allows the user to exclude certain
+ * target clusters. For example, suppose you have a ClusterSharingAlgorithm
+ * which shouldn't be applied to photons, but your list of targets will include
+ * photons. In that case you can prevent sharing with the photons by supplying
+ * them as a Collection<Cluster> to be excluded.
+ *
+ */
+
+public class ClusterSharingAlgorithmExcludingTargets implements ClusterSharingAlgorithm 
+{
+    protected ClusterSharingAlgorithm m_alg;
+    protected Collection<Cluster> m_targetsToExclude;
+
+    /** Constructor. */
+    public ClusterSharingAlgorithmExcludingTargets(ClusterSharingAlgorithm alg, Collection<Cluster> targetsToExclude) {
+	m_alg = alg;
+	m_targetsToExclude = targetsToExclude;
+    }
+
+    /** Interface: Share cluster. */
+    public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
+	List<Cluster> allowedTargets = new Vector<Cluster>();
+	for (Cluster clus : clusterTargets) {
+	    if (m_targetsToExclude.contains(clus)) {
+		// Don't use this target
+	    } else {
+		allowedTargets.add(clus);
+	    }
+	}
+	return m_alg.shareCluster(clusterToShare, allowedTargets);
+    }
+}
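
Usage sketch (not part of the commit), wired up the same way the ReclusterDTreeDriver changes further down use the wrapper: a cone-based share is wrapped so that photon clusters are never offered as targets. The cone thresholds 0.95/0.90 are the values used in that driver; the cluster arguments are placeholders for collections built elsewhere.

import java.util.*;
import org.lcsim.event.Cluster;
import org.lcsim.contrib.uiowa.*;

public class ExcludePhotonsSketch {
    /** Share clusterToShare among targets, but never with the photon clusters. */
    public static Map<Cluster,Double> shareAvoidingPhotons(Cluster clusterToShare,
                                                           List<Cluster> targets,
                                                           Collection<Cluster> photonClusters) {
        // Base algorithm: cone share with cos(angle) thresholds 0.95 / 0.90.
        ClusterSharingAlgorithm cone = new ConeClusterSharingAlgorithm(0.95, 0.90);
        // Wrap it so that the photon clusters are removed from the target list
        // before the cone algorithm ever sees them.
        ClusterSharingAlgorithm coneNoPhotons =
            new ClusterSharingAlgorithmExcludingTargets(cone, photonClusters);
        return coneNoPhotons.shareCluster(clusterToShare, targets);
    }
}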

lcsim/src/org/lcsim/contrib/uiowa
ConeClusterSharingAlgorithm.java added at 1.1
diff -N ConeClusterSharingAlgorithm.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ConeClusterSharingAlgorithm.java	12 Jul 2008 06:49:52 -0000	1.1
@@ -0,0 +1,55 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.event.*;
+
+public class ConeClusterSharingAlgorithm implements ClusterSharingAlgorithm {
+
+    // Thresholds on cos(angle) for the piecewise linear sharing score below
+    double m_scaleOK;
+    double m_scaleMin;
+    public ConeClusterSharingAlgorithm(double scaleOK, double scaleMin) {
+	if (scaleMin >= scaleOK) { throw new AssertionError("Inconsistent parameters"); }
+	if (scaleMin <= 0.0) { throw new AssertionError("Min cos(theta) must be > 0"); }
+	m_scaleMin = scaleMin;
+	m_scaleOK = scaleOK;
+    }
+
+    public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
+	Map<Cluster,Double> outputMap = new HashMap<Cluster,Double>();
+	Hep3Vector clus1Position = new BasicHep3Vector(clusterToShare.getPosition());
+	for (Cluster target : clusterTargets) {
+	    Hep3Vector clus2Position = new BasicHep3Vector(target.getPosition());
+	    Hep3Vector positionOfInnerCluster = null;
+	    Hep3Vector positionOfOuterCluster = null;
+	    if (clus2Position.magnitude() > clus1Position.magnitude()) {
+		positionOfInnerCluster = clus1Position;
+		positionOfOuterCluster = clus2Position;
+	    } else {
+		positionOfInnerCluster = clus2Position;
+		positionOfOuterCluster = clus1Position;
+	    }
+	    Hep3Vector displacementVector = VecOp.sub(positionOfOuterCluster, positionOfInnerCluster);
+	    double dotProduct = VecOp.dot(VecOp.unit(displacementVector), VecOp.unit(positionOfInnerCluster));
+	    double score = 0.0;
+	    if (dotProduct <= 1.0 && dotProduct > m_scaleOK) {
+		// In "good" region, scoring 1.0 to 0.7
+		double offset = (1.0 - dotProduct);
+		double normalizedOffset = offset / (1.0 - m_scaleOK);
+		score = 1.0 - 0.3*normalizedOffset;
+		if (score > 1.0 || score < 0.7) { throw new AssertionError("Calculation error"); }
+	    } else if (dotProduct <= m_scaleOK && dotProduct > m_scaleMin) {
+		// In "poor" region, scoring 0.7 to 0.0
+		double offset = (m_scaleOK - dotProduct);
+		double normalizedOffset = offset / (m_scaleOK - m_scaleMin);
+		score = 0.7 - 0.7*normalizedOffset;
+		if (score > 0.7 || score < 0.0) { throw new AssertionError("Calculation error"); }
+	    }
+	    if (score > 0.0) {
+		outputMap.put(target, new Double(score));
+	    }
+	}
+	return outputMap;
+    }
+}
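
For reference (not part of the commit): the score above is a piecewise linear function of the cosine of the angle between the inner cluster's radial direction (from the origin) and the displacement from the inner to the outer cluster. A self-contained restatement, using the threshold values 0.95/0.90 that appear later in this commit:

public class ConeScoreSketch {
    // Same piecewise linear score as ConeClusterSharingAlgorithm.shareCluster:
    //   dot in (scaleOK, 1.0]      -> score falls linearly from 1.0 to 0.7
    //   dot in (scaleMin, scaleOK] -> score falls linearly from 0.7 to 0.0
    //   dot <= scaleMin            -> 0.0 (target not shared)
    static double coneScore(double dot, double scaleOK, double scaleMin) {
        if (dot <= 1.0 && dot > scaleOK) {
            return 1.0 - 0.3 * (1.0 - dot) / (1.0 - scaleOK);
        } else if (dot <= scaleOK && dot > scaleMin) {
            return 0.7 - 0.7 * (scaleOK - dot) / (scaleOK - scaleMin);
        }
        return 0.0;
    }
    public static void main(String[] args) {
        double[] samples = { 1.00, 0.975, 0.95, 0.925, 0.90 };
        for (double dot : samples) {
            // Expected: 1.00 -> 1.0, 0.975 -> 0.85, 0.95 -> 0.7, 0.925 -> 0.35, 0.90 -> 0.0
            System.out.println("cos(angle) = " + dot + " -> score = " + coneScore(dot, 0.95, 0.90));
        }
    }
}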

lcsim/src/org/lcsim/contrib/uiowa
MultipleClusterSharingAlgorithm.java added at 1.1
diff -N MultipleClusterSharingAlgorithm.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MultipleClusterSharingAlgorithm.java	12 Jul 2008 06:49:52 -0000	1.1
@@ -0,0 +1,45 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.event.*;
+
+/** Combine one or more cluster sharing algorithms; for each target, the highest score from any component algorithm is kept. */
+
+public class MultipleClusterSharingAlgorithm implements ClusterSharingAlgorithm 
+{
+    List<ClusterSharingAlgorithm> m_algs;
+
+    /** Simple constructor. */
+    public MultipleClusterSharingAlgorithm() {
+	m_algs = new Vector<ClusterSharingAlgorithm>();
+    }
+
+    /** Supply initial list. */
+    public MultipleClusterSharingAlgorithm(Collection<ClusterSharingAlgorithm> algorithms) {
+	m_algs = new Vector<ClusterSharingAlgorithm>();
+	m_algs.addAll(algorithms);
+    }
+
+    /** Add a ClusterSharingAlgorithm to the list to use. */
+    public void addAlgorithm(ClusterSharingAlgorithm alg) {
+	m_algs.add(alg);
+    }
+
+    public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
+	Map<Cluster,Double> outputScoreMap = new HashMap<Cluster,Double>();
+	for (ClusterSharingAlgorithm alg : m_algs) {
+	    Map<Cluster,Double> currentScoreMap = alg.shareCluster(clusterToShare, clusterTargets);
+	    for (Cluster target : currentScoreMap.keySet()) {
+		Double score = currentScoreMap.get(target);
+		if (score == null) { throw new AssertionError("Null score"); }
+		if (score == 0.0) { throw new AssertionError("Zero score"); }
+		Double previousScore = outputScoreMap.get(target);
+		if (previousScore==null || score > previousScore) {
+		    outputScoreMap.put(target, score);
+		}
+	    }
+	}
+	return outputScoreMap;
+    }
+}
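
Usage sketch (not part of the commit), combining a proximity-based and a cone-based share the way the proximityAndConeShareForSmallClusters branch in ReclusterDTreeDriver does below; for each target the larger of the two scores wins. The parameter values are the ones appearing in that driver, and the cluster arguments are placeholders.

import java.util.*;
import org.lcsim.event.Cluster;
import org.lcsim.contrib.uiowa.*;

public class CombinedSharingSketch {
    public static Map<Cluster,Double> share(Cluster clusterToShare, List<Cluster> targets) {
        MultipleClusterSharingAlgorithm combined = new MultipleClusterSharingAlgorithm();
        // Proximity share with the small-cluster parameters (40.0, 250.0) from ReclusterDTreeDriver.
        combined.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, 250.0));
        // Cone share with cos(angle) thresholds 0.95 / 0.90.
        combined.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
        // The returned map holds, per target, the maximum score produced by either algorithm.
        return combined.shareCluster(clusterToShare, targets);
    }
}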

lcsim/src/org/lcsim/contrib/uiowa
FindSubClusters.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- FindSubClusters.java	6 Jun 2008 00:07:59 -0000	1.1
+++ FindSubClusters.java	12 Jul 2008 06:49:52 -0000	1.2
@@ -17,6 +17,16 @@
 
 public class FindSubClusters extends Driver
 {
+    // TEST
+    int m_dU = 0;
+    int m_dV = 0;
+    int m_dL = 0;
+    public void setNNrange(int dU, int dV, int dL) {
+	m_dU = dU;
+	m_dV = dV;
+	m_dL = dL;
+    }
+
     public FindSubClusters(String inputLargeClusterList, 
 			   double newMipFinderRadius,
 			   int minHitsToBeTreatedAsCluster,
@@ -106,6 +116,9 @@
 	    } 
 	    List<Cluster> mipsNewInThisTree = findMipsNew(hitsInTree);
 	    List<Cluster> clumpsInThisTree = findClumps(hitsInTree);
+	    List<Cluster> tightNeighbourClustersInThisTree = findNNClusters(hitsInTree);
+	    // Treat the NN clusters as clumps for now
+	    clumpsInThisTree.addAll(tightNeighbourClustersInThisTree);
 	    // Record what we found
 	    targetList.addAll(mipsOldInThisTree);
 	    targetList.addAll(mipsNewInThisTree);
@@ -134,11 +147,12 @@
 	}
 
 	// Write out:
-	event.put(m_outputOldMipListName, mipsOld);
-	event.put(m_outputNewMipListName, mipsNew);
-	event.put(m_outputClumpListName, clumps);
-	event.put(m_outputBlockListName, blocks);
-	event.put(m_outputLeftoverHitClustersName, leftoverHitClusters);
+	int flag = 1<<org.lcsim.util.lcio.LCIOConstants.CLBIT_HITS;
+	event.put(m_outputOldMipListName, mipsOld, Cluster.class, flag);
+	event.put(m_outputNewMipListName, mipsNew, Cluster.class, flag);
+	event.put(m_outputClumpListName, clumps, Cluster.class, flag);
+	event.put(m_outputBlockListName, blocks, Cluster.class, flag);
+	event.put(m_outputLeftoverHitClustersName, leftoverHitClusters, Cluster.class, flag);
 	// These are a bit fiddly:
 	event.put(m_outputMapTreeToTargetsName, targetsInTree);
 	event.put(m_outputMapSharedToTreeName, treeOfSharedCluster);
@@ -203,6 +217,25 @@
 	return clumpClusters;
     }
 
+    protected List<Cluster> findNNClusters(HitMap unusedHits) {
+	if (m_dU>0 && m_dV>0 && m_dL>0) {
+	    int min_cells = m_minHitsToBeTreatedAsCluster/2;
+	    Clusterer neighbourClusterer = new org.lcsim.recon.cluster.nn.NearestNeighborClusterer(m_dU, m_dV, m_dL, min_cells, 0.0);
+	    List<Cluster> output = neighbourClusterer.createClusters(unusedHits);
+	    for (Cluster clus : output) {
+		if (clus.getCalorimeterHits().size() == 0) { throw new AssertionError("clump has no hits"); }
+		if (clus.getCalorimeterHits().contains(null)) { throw new AssertionError("null hit in clump"); }
+		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		    unusedHits.remove(hit.getCellID());
+		}
+	    }
+	    return output;
+	} else {
+	    // Do nothing -- return empty vector
+	    return(new Vector<Cluster>());
+	}
+    }
+
     protected void removePoorQualityMips(Collection<Cluster> mipList) {
 	MipQualityDecision check = new MipQualityDecision();
 	List<Cluster> mipsToRemove = new Vector<Cluster>();

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.28 -> 1.29
diff -u -r1.28 -r1.29
--- ReclusterDTreeDriver.java	8 Jul 2008 16:41:12 -0000	1.28
+++ ReclusterDTreeDriver.java	12 Jul 2008 06:49:53 -0000	1.29
@@ -34,7 +34,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.28 2008/07/08 16:41:12 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.29 2008/07/12 06:49:53 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -51,6 +51,7 @@
     protected boolean m_jetDebug = false;
     protected boolean m_writeExtraEventOutput = false;
 
+    protected boolean m_findExtraNNClusters = true;
     protected boolean m_oldMipFinderCrossesTrees = true;
     protected boolean m_useNewMipFinder = true;
     protected boolean m_useOldMipFinder = true;
@@ -65,23 +66,29 @@
     protected boolean m_allowPhotonSeeds = true;
     protected boolean m_splitPhotonSeeds = true;
     protected boolean m_allPhotonsAreValidSeeds = true;
-
-    protected boolean m_checkSharedHitsForPunchThrough = true;
+    
     protected boolean m_allowNeutralCalibForEoverP = false;
 
-    protected int m_minHitsToBeTreatedAsClusterECAL = 20;
-    protected int m_minHitsToBeTreatedAsClusterHCAL = 50;
+    protected int m_minHitsToBeTreatedAsClusterECAL = 15;
+    protected int m_minHitsToBeTreatedAsClusterHCAL = 20;
     protected double m_newMipFinderRadiusECAL = 20.0;
     protected double m_newMipFinderRadiusHCAL = 50.0;
-    protected int m_punchThroughLayers = 5;
-    protected int m_punchThroughHitMinimum = 4;
+    protected boolean m_allowSharingOfIsolatedHits = true;
 
     protected double m_jetScoreThreshold = 0.7; // don't hard-code
     protected double m_jetTolerance = 1.5; // don't hard-code
 
+    protected boolean m_fixSingleTracksWithCone = true;
+    protected boolean m_fixJetsWithCone = true;
+    protected boolean m_useSimpleConeForReassignment = true;
+
     public void writeExtraEventOutput(boolean writeExtra) {
 	m_writeExtraEventOutput = writeExtra;
     }
+
+    public void suspend() {
+	super.suspend();
+    }
     
     public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList) {
 	System.out.println("ReclusterDTreeDriver version 0.2");
@@ -108,6 +115,10 @@
 	    clusDriverECAL.enableBarrelEndcapCrossing("EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary");
 	    clusDriverHCAL.enableBarrelEndcapCrossing("HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary");
 	}
+	if (m_findExtraNNClusters) {
+	    clusDriverECAL.setNNrange(1,1,1);
+	    clusDriverHCAL.setNNrange(2,2,1);
+	}
 	add(clusDriverECAL);
 	add(clusDriverHCAL);
     }
@@ -207,7 +218,7 @@
 	if (m_allowPhotonSeeds) {
 	    allMatchableClusters.addAll(chargedHadronLikePhotons);
 	}
-	allMatchableClusters.addAll(leftoverHitClusters);
+	allMatchableClusters.addAll(leftoverHitClusters); // DANGER -- leftoverHitClusters can be large!
 	allMatchableClusters.addAll(treesWithNoStructure);
 	if (allMatchableClusters.contains(null)) { throw new AssertionError("Book-keeping error: null cluster in list allMatchableClusters"); }
 
@@ -460,35 +471,109 @@
 	Map<Track, Set<Track>> mapTrackToJet = null;
 
 	// Set up sharing
-	boolean proximityShareForSmallClusters = true;
+	boolean proximityShareForSmallClusters = false;
+	boolean proximityAndConeShareForSmallClusters = true;
+
 	boolean dTreeShareForHaloECAL = true;
+	boolean proximityAndConeShareForECAL = false;
+	boolean proximityAndConeAndDTreeShareForECAL = false;
+	boolean coneAndDTreeShareForECAL = false;
+
 	boolean dTreeShareForHaloHCAL = true;
 	boolean steveMipShareForHaloHCAL = false;
 	boolean taejeongMipShareForHaloHCAL = false;
+	boolean proximityAndConeShareForHCAL = false;
+	boolean proximityAndConeAndDTreeShareForHCAL = false;
+	boolean coneAndDTreeShareForHCAL = false;
+
+	double maxDistanceForSmallClusters = 250.0; // 25cm isolation cut-off
+	double maxDistanceForDTreeClustersECAL = 150.0; // 15cm isolation (shorter since DTree sharing exists as backup)
+	double maxDistanceForDTreeClustersHCAL = 200.0; // 20cm isolation (shorter since DTree sharing exists as backup)
+	double maxDistanceForProximityClustersECAL = 100.0; // 10cm isolation (shorter since other sharing exists as backup)
+	double maxDistanceForProximityClustersHCAL = 150.0; // 15cm isolation (shorter since other sharing exists as backup)
+	if (m_allowSharingOfIsolatedHits) {
+	    maxDistanceForSmallClusters = 99999.9; // effectively no cut-off
+	    maxDistanceForDTreeClustersECAL = 99999.9; // effectively no cut-off
+	    maxDistanceForDTreeClustersHCAL = 99999.9; // effectively no cut-off
+	    maxDistanceForProximityClustersECAL = 99999.9; // effectively no cut-off
+	    maxDistanceForProximityClustersHCAL = 99999.9; // effectively no cut-off
+	}
+
+	boolean excludePhotonsFromCone = true;
+
 	List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
 	// Small clusters
 	if (proximityShareForSmallClusters) {
-	    ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
+	    ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, maxDistanceForSmallClusters);
 	    SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAlgForSmallClusters);
 	    sharedSmallDTreeClusters.createShares(linkableClusters);
 	    sharedSmallDTreeClusters.rebuildHints();
 	    allSharedClusters.add(sharedSmallDTreeClusters);
+	} else if (proximityAndConeShareForSmallClusters) {
+	    MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
+	    proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, maxDistanceForSmallClusters));
+	    if (excludePhotonsFromCone) {
+		proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
+	    } else {
+		proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+	    }
+	    SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAndConeAlg);
+	    sharedSmallDTreeClusters.createShares(linkableClusters);
+	    sharedSmallDTreeClusters.rebuildHints();
+	    allSharedClusters.add(sharedSmallDTreeClusters);
 	} else {
 	    throw new AssertionError("Unhandled case!");
 	}
 	// ECAL halo
 	if (dTreeShareForHaloECAL) {
-	    DTreeClusterSharingAlgorithm dTreeSharingAlgECAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, 150.0);
+	    DTreeClusterSharingAlgorithm dTreeSharingAlgECAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForDTreeClustersECAL);
 	    SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, dTreeSharingAlgECAL);
 	    sharedLeftoverHitClustersECAL.createShares(linkableClusters);
 	    sharedLeftoverHitClustersECAL.rebuildHints();
 	    allSharedClusters.add(sharedLeftoverHitClustersECAL);
+	} else if (proximityAndConeShareForECAL) {
+	    MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
+	    proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(20.0, maxDistanceForDTreeClustersECAL));
+	    if (excludePhotonsFromCone) {
+		proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
+	    } else {
+		proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+	    }
+	    SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, proximityAndConeAlg);
+	    sharedLeftoverHitClustersECAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersECAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersECAL);
+	} else if (proximityAndConeAndDTreeShareForECAL) {
+	    MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
+	    proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(20.0, maxDistanceForProximityClustersECAL));
+	    if (excludePhotonsFromCone) {
+		proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
+	    } else {
+		proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+	    }
+	    proximityAndConeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForProximityClustersECAL));
+	    SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, proximityAndConeAlg);
+	    sharedLeftoverHitClustersECAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersECAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersECAL);
+	} else if (coneAndDTreeShareForECAL) {
+	    MultipleClusterSharingAlgorithm coneAndDTreeAlg = new MultipleClusterSharingAlgorithm();
+	    if (excludePhotonsFromCone) {
+		coneAndDTreeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
+	    } else {
+		coneAndDTreeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+	    }
+	    coneAndDTreeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForProximityClustersECAL));
+	    SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, coneAndDTreeAlg);
+	    sharedLeftoverHitClustersECAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersECAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersECAL);
 	} else {
 	    throw new AssertionError("Unhandled case!");
 	}
 	// HCAL halo
 	if (dTreeShareForHaloHCAL) {
-	    DTreeClusterSharingAlgorithm dTreeSharingAlgHCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, 200.0);
+	    DTreeClusterSharingAlgorithm dTreeSharingAlgHCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, maxDistanceForDTreeClustersHCAL);
 	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, dTreeSharingAlgHCAL);
 	    sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
 	    sharedLeftoverHitClustersHCAL.rebuildHints();
@@ -526,6 +611,43 @@
 	    sharedLeftoverHitClustersHCAL.createShares(tmpSeedList); // TEST
 	    sharedLeftoverHitClustersHCAL.rebuildHints();
 	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+	} else if (proximityAndConeShareForHCAL) {
+	    MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
+	    proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(50.0, maxDistanceForProximityClustersHCAL));
+	    if (excludePhotonsFromCone) {
+		proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
+	    } else {
+		proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+	    }
+	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, proximityAndConeAlg);
+	    sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersHCAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+	} else if (proximityAndConeAndDTreeShareForHCAL) {
+	    MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
+	    proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, maxDistanceForProximityClustersHCAL));
+	    if (excludePhotonsFromCone) {
+		proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
+	    } else {
+		proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+	    }
+	    proximityAndConeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 40.0, maxDistanceForProximityClustersHCAL));
+	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, proximityAndConeAlg);
+	    sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersHCAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+	} else if (coneAndDTreeShareForHCAL) {
+	    MultipleClusterSharingAlgorithm coneAndDTreeAlg = new MultipleClusterSharingAlgorithm();
+	    if (excludePhotonsFromCone) {
+		coneAndDTreeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
+	    } else {
+		coneAndDTreeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+	    }
+	    coneAndDTreeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 40.0, maxDistanceForProximityClustersHCAL));
+	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneAndDTreeAlg);
+	    sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersHCAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
 	} else {
 	    throw new AssertionError("Unhandled case!");
 	}
@@ -628,11 +750,22 @@
 		}
 	    }
 	}
-	boolean m_fixSingleTracksWithCone = true;
-	boolean m_fixJetsWithCone = true;
+
 	LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
 	findCluster.process(m_event); // picks up geometry
-	ReassignClustersAlgorithm algorithm = new ConeReassignmentAlgorithm(1.0, findCluster);
+	ShowerPointFinder showerFinder = new ShowerPointFinder(findCluster, newMapTrackToShowerComponents, mips, allHits);
+	Map<Track,BasicCluster> MapTrkToMIP = showerFinder.findMips(tracksSortedByMomentum);
+	event.put("ShowerFinderMapTrackToMip", MapTrkToMIP);
+	List<Cluster> preShowerMips = new Vector<Cluster>();
+	preShowerMips.addAll(MapTrkToMIP.values());
+	event.put("ShowerFinderMips", preShowerMips);
+
+	ReassignClustersAlgorithm algorithm = null;
+	if (m_useSimpleConeForReassignment) {
+	    algorithm = new ConeReassignmentAlgorithm(1.0, findCluster);
+	} else {
+	    algorithm = new MIPReassignmentAlgorithm(0.79, findCluster, MapTrkToMIP);
+	}
 	if (m_fixSingleTracksWithCone) {
 	    // First, try to fix the simplest case: single tracks with E/p < 1
 	    for (Track tr : tracksSortedByMomentum) {
@@ -653,6 +786,8 @@
 	    printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
 	    //	}
 
+	    makePlotsOfEoverP(newMapTrackToShowerComponents, newMapJetToShowerComponents, mapTrackToJet, allSharedClusters);
+
 	// Outputs
 	makeParticlesAndWriteOut(trackList, tracksSortedByMomentum, unmatchedTracksThatDontReachCalorimeter, mapOrigTrackToTweakedTrack,
 				 tracksMatchedToClusters, clustersMatchedToTracks,
@@ -1038,83 +1173,6 @@
 	return Double.NaN;
     }
 
-
-
-    protected int countHitsInSideEdgesOfHcal(Cluster clus, int nLayersToCheck) {
-	// Pick up detector geometry
-        Detector det = m_event.getDetector();
-        CylindricalCalorimeter hadBarrel = ((CylindricalCalorimeter) det.getSubdetectors().get("HADBarrel"));
-	double backZ = hadBarrel.getZMax();
-	double innerRadius = hadBarrel.getInnerRadius();
-	double tanTheta = innerRadius/backZ;
-
-	org.lcsim.geometry.IDDecoder id = hadBarrel.getIDDecoder();
-	if (id instanceof org.lcsim.geometry.segmentation.NonprojectiveCylinder) {
-	    org.lcsim.geometry.segmentation.NonprojectiveCylinder segmentation = (org.lcsim.geometry.segmentation.NonprojectiveCylinder) id;
-	    double gridZ = segmentation.getGridSizeZ();
-	    // We want to include at least (nLayersToCheck) layers.
-	    // The layer segmentation is (gridZ).
-	    // For a MIP travelling at a polar angle theta, this means
-	    // a Z range of at least (nLayersToCheck*gridZ)/tanTheta.
-	    double nLayersToCheck_double = nLayersToCheck;
-	    double rangeZ = nLayersToCheck_double * gridZ / tanTheta;
-	    double backCellsRange = (backZ - rangeZ); // minimum Z to be included
-	    Set<Long> backCellsFound = new HashSet<Long>();
-	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
-		double hitZ = hit.getPosition()[2];
-		if (Math.abs(hitZ) > backCellsRange) {
-		    backCellsFound.add(hit.getCellID());
-		}
-	    }
-	    return backCellsFound.size();
-	} else {
-	    throw new AssertionError("Help! Don't know how to handle barrel geometry of class "+id.getClass().getName());
-	}
-    }
-
-    protected int countHitsInLastLayersOfHcal(ReconstructedParticle part, int nLayersToCheck) {
-	return countHitsInLastLayersOfHcal(part.getClusters(), nLayersToCheck);
-    }
-    protected int countHitsInLastLayersOfHcal(Cluster clus, int nLayersToCheck) {
-	List<Cluster> tmpList = new Vector<Cluster>();
-	tmpList.add(clus);
-	return countHitsInLastLayersOfHcal(tmpList, nLayersToCheck);
-    }
-    protected int countHitsInLastLayersOfHcal(Collection<Cluster> clusters, int nLayersToCheck) {
-	// Pick up detector geometry
-        Detector det = m_event.getDetector();
-        CylindricalCalorimeter hadBarrel = ((CylindricalCalorimeter) det.getSubdetectors().get("HADBarrel"));
-        CylindricalCalorimeter hadEndcap = ((CylindricalCalorimeter) det.getSubdetectors().get("HADEndcap"));
-        int nLayersHadBarrel = hadBarrel.getLayering().getLayerCount();
-        int nLayersHadEndcap = hadEndcap.getLayering().getLayerCount();
-
-	// Scan for hits
-	Set<Long> hitsFoundInLastLayersOfHcal = new HashSet<Long>();
-	for (Cluster clus : clusters) {
-	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
-		int nLayersInSubdet = 0;
-		org.lcsim.geometry.Subdetector subdet = hit.getSubdetector();
-		String name = subdet.getName();
-		if (name.compareTo("HADBarrel")==0) {
-		    nLayersInSubdet = nLayersHadBarrel;
-		} else if (name.compareTo("HADEndcap")==0) {
-		    nLayersInSubdet = nLayersHadEndcap;
-		} else {
-		    // Not in HCAL
-		    continue;
-		}
-		org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
-		id.setID(hit.getCellID());
-		int layer = id.getLayer();
-		if (layer >= nLayersInSubdet-nLayersToCheck) {
-		    // In last n layers
-		    hitsFoundInLastLayersOfHcal.add(hit.getCellID());
-		}
-	    }
-	}
-	return hitsFoundInLastLayersOfHcal.size();
-    }
-
     protected BaseReconstructedParticle makePhoton(Cluster clus, List<SharedClusterGroup> allSharedClusters) {
 	// Create
 	BaseReconstructedParticle part = new BaseReconstructedParticle();
@@ -1580,27 +1638,6 @@
 	}
     }
 
-    boolean isPunchThrough(Collection<Cluster> showerComponents, List<SharedClusterGroup> allSharedClusters) {
-	Cluster clusterToCheckForPunchThrough = null;
-	if (m_checkSharedHitsForPunchThrough) {
-	    Cluster sharedHitCluster = makeClusterOfSharedHits(showerComponents, allSharedClusters);
-	    List<Cluster> showerComponentsPlusSharedHits = new Vector<Cluster>();
-	    showerComponentsPlusSharedHits.addAll(showerComponents);
-	    showerComponentsPlusSharedHits.add(sharedHitCluster);
-	    clusterToCheckForPunchThrough = makeCombinedCluster(showerComponentsPlusSharedHits);
-	} else {
-	    clusterToCheckForPunchThrough = makeCombinedCluster(showerComponents);
-	}
-	// Check for regular punch-through (shows up as hits in outermost layers of HCAL:
-	int hitCount = countHitsInLastLayersOfHcal(clusterToCheckForPunchThrough, m_punchThroughLayers);
-	boolean longitudinalPunchThrough = (hitCount >= m_punchThroughHitMinimum);
-	// Also check for tracks that punch through the sides of the barrel
-	int barrelEdgeHitCount = countHitsInSideEdgesOfHcal(clusterToCheckForPunchThrough, m_punchThroughLayers);
-	boolean transversePunchThrough = (barrelEdgeHitCount >= m_punchThroughHitMinimum);
-	return (longitudinalPunchThrough || transversePunchThrough);
-
-    }
-
     void scanForElectrons(List<Track> trackList, List<Cluster> photons, List<Cluster> electronClusters, List<Track> electronTracks) {
 	Map<Track,Cluster> electronCandidateMap = new HashMap<Track,Cluster>();
 	Set<Cluster> electronCandidateClusters = new HashSet<Cluster>();

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.25 -> 1.26
diff -u -r1.25 -r1.26
--- ReclusterDriver.java	8 Jul 2008 16:34:19 -0000	1.25
+++ ReclusterDriver.java	12 Jul 2008 06:49:53 -0000	1.26
@@ -37,7 +37,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.25 2008/07/08 16:34:19 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.26 2008/07/12 06:49:53 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -77,6 +77,10 @@
 
     protected boolean m_allowComponentsToStraddleLargeClusters = false;
 
+    protected int m_punchThroughLayers = 5;
+    protected int m_punchThroughHitMinimum = 4;
+    protected boolean m_checkSharedHitsForPunchThrough = true;
+
     protected ReclusterDriver() {
 	// Gah, debug only!
     }
@@ -2487,50 +2491,77 @@
 	}
     }	
 
-    private void makePlotsOfEoverP(Collection<ReconstructedParticle> chargedParticles,
-				   Map<Track, Set<Cluster>> newMapTrackToShowerComponents,
-				   List<SharedClusterGroup> allSharedClusters
-				   )
+    private void makePlotsOfEoverP(Collection<ReconstructedParticle> chargedParticles, Map<Track, Set<Cluster>> newMapTrackToShowerComponents, List<SharedClusterGroup> allSharedClusters) {
+	makePlotsOfEoverP(newMapTrackToShowerComponents, null, null, allSharedClusters);
+    }
+
+    protected void makePlotsOfEoverP(Map<Track, Set<Cluster>> newMapTrackToShowerComponents,
+				     Map<Set<Track>, Set<Cluster>> newMapJetToShowerComponents,
+				     Map<Track, Set<Track>> mapTrackToJet,
+				     List<SharedClusterGroup> allSharedClusters
+				     )
     {
-	for (ReconstructedParticle part : chargedParticles) {
-	    List<Track> tracks = part.getTracks();
-	    m_histo_trackMultiplicity.fill(tracks.size());
-	    Set<Cluster> showerComponentsForAllTracks = new HashSet<Cluster>();
-	    double trackMomentumTotal = 0.0;
-	    for (Track tr : tracks) {
-		Set<Cluster> showerComponentsForThisTrack = newMapTrackToShowerComponents.get(tr);
-		showerComponentsForAllTracks.addAll(showerComponentsForThisTrack);
-		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
-		trackMomentumTotal += trackMomentum;
-		m_histo_trackMomentum.fill(trackMomentum);
+	// Split into jet and non-jet tracks
+	Set<Set<Track>> jets = new HashSet<Set<Track>>();
+	if (newMapJetToShowerComponents != null) {
+	    jets = newMapJetToShowerComponents.keySet();
+	}
+	Set<Track> tracks = newMapTrackToShowerComponents.keySet();
+	Set<Track> nonJetTracks = new HashSet<Track>();
+	for (Track tr : tracks) {
+	    if (mapTrackToJet==null || mapTrackToJet.get(tr)==null) {
+		nonJetTracks.add(tr);
 	    }
-	    double clusterEnergy = energy(showerComponentsForAllTracks, allSharedClusters);
-	    m_histo_clusterEnergy.fill(clusterEnergy);
-	    m_histo_clusterEnergyVsMomentum.fill(trackMomentumTotal, clusterEnergy);
-	    double energyResidual = (clusterEnergy - trackMomentumTotal);
-	    m_histo_clusterEnergyResidual.fill(energyResidual);
-	    m_histo_clusterEnergyResidualVsMomentum.fill(trackMomentumTotal, energyResidual);
-	    double sigma = 0.7 * Math.sqrt(trackMomentumTotal);
-	    double normalizedEnergyResidual = energyResidual/sigma;
-	    m_histo_normalizedClusterEnergyResidual.fill(normalizedEnergyResidual);
-	    m_histo_normalizedClusterEnergyResidualVsMomentum.fill(trackMomentumTotal, normalizedEnergyResidual);
-	    double coreEffic = quoteEfficiency_T(tracks, showerComponentsForAllTracks);
-	    double corePurity = quotePurity_T(tracks, showerComponentsForAllTracks);
-	    double realEffic = quoteEfficiency_T(tracks, showerComponentsForAllTracks, allSharedClusters);
-	    double realPurity = quotePurity_T(tracks, showerComponentsForAllTracks, allSharedClusters);
-	    m_histo_coreEffic.fill(coreEffic);
-	    m_histo_corePurity.fill(corePurity);
-	    m_histo_realEffic.fill(realEffic);
-	    m_histo_realPurity.fill(realPurity);
-	    double correctedEnergyEstimate = clusterEnergy * realPurity / realEffic;
-	    double correctedEnergyEstimateResidual = correctedEnergyEstimate - trackMomentumTotal;
-	    double correctedEnergyEstimateNormalizedResidual = correctedEnergyEstimateResidual / sigma;
-	    m_histo_correctedEnergyEstimateResidual.fill(correctedEnergyEstimateResidual);
-	    m_histo_correctedEnergyEstimateResidualVsMomentum.fill(trackMomentumTotal, correctedEnergyEstimateResidual);
-	    m_histo_correctedEnergyEstimateNormalizedResidual.fill(correctedEnergyEstimateNormalizedResidual);
-	    m_histo_correctedEnergyEstimateNormalizedResidualVsMomentum.fill(trackMomentumTotal, correctedEnergyEstimateNormalizedResidual);
 	}
-	    
+	for (Track tr : nonJetTracks) {
+	    Set<Cluster> shower = newMapTrackToShowerComponents.get(tr);
+	    double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+	    double trackMomentumSigma = estimatedEnergyUncertainty(tr);
+	    makeSubPlotsOfEoverP(trackMomentum, trackMomentumSigma, shower, allSharedClusters);
+	    List<Track> tmpList = new Vector<Track>();
+	    tmpList.add(tr);
+	    makeSubPlotsOfPurity(tmpList, shower, allSharedClusters);
+	}
+	for (Set<Track> jet : jets) {
+	    Set<Cluster> shower = newMapJetToShowerComponents.get(jet);
+	    double jetMomentum = jetScalarMomentum(jet);
+	    double jetMomentumSigma = estimatedEnergyUncertainty(jet);
+	    makeSubPlotsOfEoverP(jetMomentum, jetMomentumSigma, shower, allSharedClusters);
+	    makeSubPlotsOfPurity(jet, shower, allSharedClusters);
+	}
+	
+    }
+
+    private void makeSubPlotsOfEoverP(double trackMomentumTotal, double sigma, Collection<Cluster> shower, List<SharedClusterGroup> allSharedClusters) {
+	m_histo_trackMomentum.fill(trackMomentumTotal);
+	double clusterEnergy = energy(shower, allSharedClusters);
+	m_histo_clusterEnergy.fill(clusterEnergy);
+	m_histo_clusterEnergyVsMomentum.fill(trackMomentumTotal, clusterEnergy);
+	double energyResidual = (clusterEnergy - trackMomentumTotal);
+	m_histo_clusterEnergyResidual.fill(energyResidual);
+	m_histo_clusterEnergyResidualVsMomentum.fill(trackMomentumTotal, energyResidual);
+	double normalizedEnergyResidual = energyResidual/sigma;
+	m_histo_normalizedClusterEnergyResidual.fill(normalizedEnergyResidual);
+	m_histo_normalizedClusterEnergyResidualVsMomentum.fill(trackMomentumTotal, normalizedEnergyResidual);
+	boolean punchThrough = isPunchThrough(shower, allSharedClusters);
+	if (!punchThrough) {
+	    m_histo_clusterEnergyVsMomentumNoPunchThrough.fill(trackMomentumTotal, clusterEnergy);
+	    m_histo_clusterEnergyResidualNoPunchThrough.fill(energyResidual);
+	    m_histo_clusterEnergyResidualVsMomentumNoPunchThrough.fill(trackMomentumTotal, energyResidual);
+	    m_histo_normalizedClusterEnergyResidualNoPunchThrough.fill(normalizedEnergyResidual);
+	    m_histo_normalizedClusterEnergyResidualVsMomentumNoPunchThrough.fill(trackMomentumTotal, normalizedEnergyResidual);
+	}
+
+    }
+    private void makeSubPlotsOfPurity(Collection<Track> tracks, Collection<Cluster> shower, List<SharedClusterGroup> allSharedClusters) {
+	double coreEffic = quoteEfficiency_T(tracks, shower);
+	double corePurity = quotePurity_T(tracks, shower);
+	double realEffic = quoteEfficiency_T(tracks, shower, allSharedClusters);
+	double realPurity = quotePurity_T(tracks, shower, allSharedClusters);
+	m_histo_coreEffic.fill(coreEffic);
+	m_histo_corePurity.fill(corePurity);
+	m_histo_realEffic.fill(realEffic);
+	m_histo_realPurity.fill(realPurity);
     }
 
     ITree m_tree = null;
@@ -2543,6 +2574,11 @@
     ICloud2D m_histo_clusterEnergyResidualVsMomentum;
     ICloud1D m_histo_normalizedClusterEnergyResidual;
     ICloud2D m_histo_normalizedClusterEnergyResidualVsMomentum;
+    ICloud2D m_histo_clusterEnergyVsMomentumNoPunchThrough;
+    ICloud1D m_histo_clusterEnergyResidualNoPunchThrough;
+    ICloud2D m_histo_clusterEnergyResidualVsMomentumNoPunchThrough;
+    ICloud1D m_histo_normalizedClusterEnergyResidualNoPunchThrough;
+    ICloud2D m_histo_normalizedClusterEnergyResidualVsMomentumNoPunchThrough;
     ICloud1D m_histo_coreEffic;
     ICloud1D m_histo_corePurity;
     ICloud1D m_histo_realEffic;
@@ -2560,10 +2596,18 @@
 	    m_histo_trackMomentum= m_histoFactory.createCloud1D("m_histo_trackMomentum");
 	    m_histo_clusterEnergy= m_histoFactory.createCloud1D("m_histo_clusterEnergy");
 	    m_histo_clusterEnergyVsMomentum= m_histoFactory.createCloud2D("m_histo_clusterEnergyVsMomentum");
+
 	    m_histo_clusterEnergyResidual= m_histoFactory.createCloud1D("m_histo_clusterEnergyResidual");
 	    m_histo_clusterEnergyResidualVsMomentum= m_histoFactory.createCloud2D("m_histo_clusterEnergyResidualVsMomentum");
 	    m_histo_normalizedClusterEnergyResidual= m_histoFactory.createCloud1D("m_histo_normalizedClusterEnergyResidual");
 	    m_histo_normalizedClusterEnergyResidualVsMomentum= m_histoFactory.createCloud2D("m_histo_normalizedClusterEnergyResidualVsMomentum");
+
+	    m_histo_clusterEnergyVsMomentumNoPunchThrough= m_histoFactory.createCloud2D("m_histo_clusterEnergyVsMomentumNoPunchThrough");
+	    m_histo_clusterEnergyResidualNoPunchThrough= m_histoFactory.createCloud1D("m_histo_clusterEnergyResidualNoPunchThrough");
+	    m_histo_clusterEnergyResidualVsMomentumNoPunchThrough= m_histoFactory.createCloud2D("m_histo_clusterEnergyResidualVsMomentumNoPunchThrough");
+	    m_histo_normalizedClusterEnergyResidualNoPunchThrough= m_histoFactory.createCloud1D("m_histo_normalizedClusterEnergyResidualNoPunchThrough");
+	    m_histo_normalizedClusterEnergyResidualVsMomentumNoPunchThrough= m_histoFactory.createCloud2D("m_histo_normalizedClusterEnergyResidualVsMomentumNoPunchThrough");
+
 	    m_histo_coreEffic= m_histoFactory.createCloud1D("m_histo_coreEffic");
 	    m_histo_corePurity= m_histoFactory.createCloud1D("m_histo_corePurity");
 	    m_histo_realEffic= m_histoFactory.createCloud1D("m_histo_realEffic");
@@ -3157,4 +3201,98 @@
 	}
 	return totalMomentum;
     }
+
+    boolean isPunchThrough(Collection<Cluster> showerComponents, List<SharedClusterGroup> allSharedClusters) {
+	Cluster clusterToCheckForPunchThrough = null;
+	if (m_checkSharedHitsForPunchThrough) {
+	    Cluster sharedHitCluster = makeClusterOfSharedHits(showerComponents, allSharedClusters);
+	    List<Cluster> showerComponentsPlusSharedHits = new Vector<Cluster>();
+	    showerComponentsPlusSharedHits.addAll(showerComponents);
+	    showerComponentsPlusSharedHits.add(sharedHitCluster);
+	    clusterToCheckForPunchThrough = makeCombinedCluster(showerComponentsPlusSharedHits);
+	} else {
+	    clusterToCheckForPunchThrough = makeCombinedCluster(showerComponents);
+	}
+	// Check for regular punch-through (shows up as hits in the outermost layers of the HCAL):
+	int hitCount = countHitsInLastLayersOfHcal(clusterToCheckForPunchThrough, m_punchThroughLayers);
+	boolean longitudinalPunchThrough = (hitCount >= m_punchThroughHitMinimum);
+	// Also check for tracks that punch through the sides of the barrel
+	int barrelEdgeHitCount = countHitsInSideEdgesOfHcal(clusterToCheckForPunchThrough, m_punchThroughLayers);
+	boolean transversePunchThrough = (barrelEdgeHitCount >= m_punchThroughHitMinimum);
+	return (longitudinalPunchThrough || transversePunchThrough);
+    }
+
+    protected int countHitsInLastLayersOfHcal(ReconstructedParticle part, int nLayersToCheck) {
+	return countHitsInLastLayersOfHcal(part.getClusters(), nLayersToCheck);
+    }
+    protected int countHitsInLastLayersOfHcal(Cluster clus, int nLayersToCheck) {
+	List<Cluster> tmpList = new Vector<Cluster>();
+	tmpList.add(clus);
+	return countHitsInLastLayersOfHcal(tmpList, nLayersToCheck);
+    }
+    protected int countHitsInLastLayersOfHcal(Collection<Cluster> clusters, int nLayersToCheck) {
+	// Pick up detector geometry
+        Detector det = m_event.getDetector();
+        CylindricalCalorimeter hadBarrel = ((CylindricalCalorimeter) det.getSubdetectors().get("HADBarrel"));
+        CylindricalCalorimeter hadEndcap = ((CylindricalCalorimeter) det.getSubdetectors().get("HADEndcap"));
+        int nLayersHadBarrel = hadBarrel.getLayering().getLayerCount();
+        int nLayersHadEndcap = hadEndcap.getLayering().getLayerCount();
+
+	// Scan for hits
+	Set<Long> hitsFoundInLastLayersOfHcal = new HashSet<Long>();
+	for (Cluster clus : clusters) {
+	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		int nLayersInSubdet = 0;
+		org.lcsim.geometry.Subdetector subdet = hit.getSubdetector();
+		String name = subdet.getName();
+		if (name.compareTo("HADBarrel")==0) {
+		    nLayersInSubdet = nLayersHadBarrel;
+		} else if (name.compareTo("HADEndcap")==0) {
+		    nLayersInSubdet = nLayersHadEndcap;
+		} else {
+		    // Not in HCAL
+		    continue;
+		}
+		org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
+		id.setID(hit.getCellID());
+		int layer = id.getLayer();
+		if (layer >= nLayersInSubdet-nLayersToCheck) {
+		    // In last n layers
+		    hitsFoundInLastLayersOfHcal.add(hit.getCellID());
+		}
+	    }
+	}
+	return hitsFoundInLastLayersOfHcal.size();
+    }
+    protected int countHitsInSideEdgesOfHcal(Cluster clus, int nLayersToCheck) {
+	// Pick up detector geometry
+        Detector det = m_event.getDetector();
+        CylindricalCalorimeter hadBarrel = ((CylindricalCalorimeter) det.getSubdetectors().get("HADBarrel"));
+	double backZ = hadBarrel.getZMax();
+	double innerRadius = hadBarrel.getInnerRadius();
+	double tanTheta = innerRadius/backZ;
+
+	org.lcsim.geometry.IDDecoder id = hadBarrel.getIDDecoder();
+	if (id instanceof org.lcsim.geometry.segmentation.NonprojectiveCylinder) {
+	    org.lcsim.geometry.segmentation.NonprojectiveCylinder segmentation = (org.lcsim.geometry.segmentation.NonprojectiveCylinder) id;
+	    double gridZ = segmentation.getGridSizeZ();
+	    // We want to include at least (nLayersToCheck) layers.
+	    // The layer segmentation is (gridZ).
+	    // For a MIP travelling at a polar angle theta, this means
+	    // a Z range of at least (nLayersToCheck*gridZ)/tanTheta.
+	    double nLayersToCheck_double = nLayersToCheck;
+	    double rangeZ = nLayersToCheck_double * gridZ / tanTheta;
+	    double backCellsRange = (backZ - rangeZ); // minimum Z to be included
+	    Set<Long> backCellsFound = new HashSet<Long>();
+	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		double hitZ = hit.getPosition()[2];
+		if (Math.abs(hitZ) > backCellsRange) {
+		    backCellsFound.add(hit.getCellID());
+		}
+	    }
+	    return backCellsFound.size();
+	} else {
+	    throw new AssertionError("Help! Don't know how to handle barrel geometry of class "+id.getClass().getName());
+	}
+    }
 }
CVSspam 0.2.8