Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDriver.java        +335 -182   1.13 -> 1.14
ReclusterDTreeDriver.java    +93 -3     1.5 -> 1.6
Total                       +428 -185
2 modified files
MJC: Refactor code a bit and add some debug info. Also fix two bugs: one where the out-of-neighbourhood scoring penalty was being applied too often for DTree mips/clumps, and another where subclusters already assigned to a shower were being stolen by a second shower in probeFullPropagation().
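
The second fix is the subtler of the two, so a minimal, self-contained sketch of the idea follows (plain strings stand in for the lcsim Cluster objects and a small Link class for ScoredLink; this is illustrative only, not the committed code). The point is that propagation from a seed over scored links must refuse to pull in clusters that are already owned by another shower, which is what the new unavailableClusters argument to probeFullPropagation() supplies.

import java.util.*;

// Toy stand-ins for the lcsim types: clusters are plain strings, links carry a score.
class ProbePropagationSketch {
    static final class Link {
        final String a, b;
        final double score;
        Link(String a, String b, double score) { this.a = a; this.b = b; this.score = score; }
        // Return the other endpoint, or null if c is not an endpoint of this link.
        String counterpart(String c) { return c.equals(a) ? b : (c.equals(b) ? a : null); }
    }

    // Collect everything reachable from 'seed' through links passing 'threshold',
    // but never through a cluster in 'unavailable' (already owned by another shower).
    static Set<String> probe(String seed, List<Link> links, double threshold, Set<String> unavailable) {
        Set<String> output = new HashSet<String>();
        LinkedList<String> frontier = new LinkedList<String>();
        frontier.add(seed);
        while (!frontier.isEmpty()) {
            String clus = frontier.poll();
            for (Link link : links) {
                String next = link.counterpart(clus);
                if (next == null || link.score < threshold) { continue; }
                if (next.equals(seed) || output.contains(next)) { continue; }
                if (unavailable.contains(next)) { continue; } // the fix: skip clusters already assigned
                output.add(next);
                frontier.add(next);
            }
        }
        return output;
    }

    public static void main(String[] args) {
        List<Link> links = Arrays.asList(
            new Link("seedA", "clump1", 0.9),
            new Link("clump1", "mip1", 0.8),
            new Link("mip1", "clump2", 0.7)); // clump2 already belongs to another track's shower
        Set<String> taken = new HashSet<String>(Collections.singletonList("clump2"));
        System.out.println(probe("seedA", links, 0.5, taken)); // prints [clump1, mip1] (in some order)
    }
}

In the driver itself the unavailable set is newMapShowerComponentToTrack.keySet(), i.e. every cluster already assigned to some track's shower at the time of the call.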

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.13 -> 1.14
diff -u -r1.13 -r1.14
--- ReclusterDriver.java	20 Dec 2007 04:06:16 -0000	1.13
+++ ReclusterDriver.java	4 Jan 2008 19:20:27 -0000	1.14
@@ -18,6 +18,8 @@
 import hep.physics.particle.Particle;
 import org.lcsim.recon.cluster.structural.likelihood.*;
 import org.lcsim.recon.cluster.structural.*;
+import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
+import org.lcsim.geometry.Detector;
 
 /**
   * An algorithm to recluster showers using E/p information
@@ -30,7 +32,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.13 2007/12/20 04:06:16 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.14 2008/01/04 19:20:27 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -133,6 +135,7 @@
     public void process(EventHeader event) {
 	super.process(event); // hmm...
 	m_event = event;
+	initGeometry(event);
 
 	// Read in
 	List<Track> trackList = event.get(Track.class, m_inputTrackList);
@@ -332,161 +335,8 @@
 
 	// Some debug printout
 	if (m_debug) {
-	    System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
-	    for (Track tr : unmatchedTracks) {
-		Hep3Vector p = new BasicHep3Vector(tr.getMomentum());
-		double px = p.x();
-		double py = p.y();
-		double pz = p.z();
-		double pmag = p.magnitude();
-		double pt = Math.sqrt(px*px + py*py);
-		double cosTheta = pz/pmag;
-		String printme = new String("Track with p="+p.magnitude()+" (cosTheta="+cosTheta+")");
-		LocalHelixExtrapolationTrackClusterMatcher extrap = new LocalHelixExtrapolationTrackClusterMatcher();
-		extrap.process(m_event);
-		Hep3Vector point = extrap.performExtrapolation(tr);
-		if (point == null) {
-		    printme += " -- extrapolation point is null";
-		} else {
-		    double x = point.x();
-		    double y = point.y();
-		    double z = point.z();
-		    double r = Math.sqrt(x*x + y*y);
-		    printme += " -- extrapolation point at x="+point.x()+", y="+point.y()+", z="+point.z()+", r="+r;
-		}
-		System.out.println(printme);
-		if (point != null) {
-		    System.out.println("Dumping ECAL hits from this particle:");
-		    MCParticle trackTruth = ((BaseTrackMC)(tr)).getMCParticle();
-		    List<CalorimeterHit> EcalBarrDigiHits = event.get(CalorimeterHit.class, "EcalBarrDigiHits");
-		    List<CalorimeterHit> EcalEndcapDigiHits  = event.get(CalorimeterHit.class, "EcalEndcapDigiHits");
-		    List<SimCalorimeterHit> EcalHits = new Vector<SimCalorimeterHit>();
-		    for (CalorimeterHit hit : EcalBarrDigiHits) { EcalHits.add((SimCalorimeterHit)(hit)); }
-		    for (CalorimeterHit hit : EcalEndcapDigiHits) { EcalHits.add((SimCalorimeterHit)(hit)); }
-		    for (SimCalorimeterHit hit : EcalHits) {
-			int mcCount = hit.getMCParticleCount();
-			boolean matchedTruth = false;
-			for (int i=0; i<mcCount; i++) {
-			    MCParticle mc = hit.getMCParticle(i);
-			    if (mc == trackTruth) {
-				matchedTruth = true;
-				break;
-			    }
-			}
-			Hep3Vector hitPos = new BasicHep3Vector(hit.getPosition());
-			Hep3Vector displacement = VecOp.sub(hitPos, point);
-			double r = Math.sqrt(hitPos.x()*hitPos.x() + hitPos.y()*hitPos.y());
-			double separation = displacement.magnitude();
-			if (separation < 50.0) {
-			    String collection = "Collection=";
-			    for (Cluster clus : mips) { if (clus.getCalorimeterHits().contains(hit)) { collection += "MIPs,"; break; } }
-			    for (Cluster clus : clumps) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Clumps,"; break; } }
-			    for (Cluster clus : photonClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Photons,"; break; } }
-			    for (Cluster clus : largeClustersWithoutSkeletons) { if (clus.getCalorimeterHits().contains(hit)) { collection += "LargeClusWithoutSkel,"; break; } }
-			    for (Cluster clus : smallClustersWithoutSkeletons) { if (clus.getCalorimeterHits().contains(hit)) { collection += "SmallClusWithoutSkel,"; break; } }
-			    if (unusedHits.values().contains(hit)) { collection += "UnusedHits,"; }
-			    for (Cluster clus : largeClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "LargeClus,"; break; } }
-			    for (Cluster clus : skeletonClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Skel,"; break; } }
-			    System.out.println("   hit at r="+r+", z="+hitPos.z()+", x="+hitPos.x()+", y="+hitPos.y()+" with separation="+displacement.magnitude()+" -- in list "+collection);
-			}
-		    }
-		}
-	    }
-
-	    System.out.println("DEBUG: Here are the "+tracksMatchedToClusters.keySet().size()+" matched tracks");
-	    for (Track tr : tracksMatchedToClusters.keySet()) {
-		Cluster clus = tracksMatchedToClusters.get(tr);
-		double[] p = tr.getMomentum();
-		Particle mcTruth = null;
-		if (tr instanceof ReconTrack) {
-		    mcTruth = ((ReconTrack)(tr)).getMCParticle();
-		}
-		if (tr instanceof BaseTrackMC) {
-		    mcTruth = ((BaseTrackMC)(tr)).getMCParticle();
-		}
-		double mom = Math.sqrt(p[0]*p[0] + p[1]*p[1] + p[2]*p[2]);
-		// Which list is that from?
-		String printme = new String("Matched track with p=");
-		printme += mom;
-		if (mcTruth != null) {
-		    printme += " (truth type "+mcTruth.getType()+")";
-		}
-		printme += " to a cluster with ";
-		printme += clus.getCalorimeterHits().size();
-		printme += " hits and energy of ";
-		printme += clus.getEnergy();
-		if (mips.contains(clus)) { printme += " [from MIPS]"; }
-		if (clumps.contains(clus)) { printme += " [from CLUMPS]"; }
-		if (largeClustersWithoutSkeletons.contains(clus)) { printme += " [from LARGE CLUSTERS W/O SKELETONS]"; }
-		if (photonClusters.contains(clus)) { printme += " [from PHOTONS]"; }
-		if (smallClustersWithoutSkeletons.contains(clus)) { printme += " [from SMALL CLUSTERS]"; }
-		Map<MCParticle, List<CalorimeterHit>> clusterTruth = new HashMap<MCParticle, List<CalorimeterHit>>();
-		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
-		    SimCalorimeterHit simHit = (SimCalorimeterHit)(hit);
-		    MCParticle mcTruthPart = simHit.getMCParticle(0);
-		    List<CalorimeterHit> hitList = clusterTruth.get(mcTruthPart);
-		    if (hitList == null) {
-			hitList = new Vector<CalorimeterHit>();
-			clusterTruth.put(mcTruthPart, hitList);
-		    }
-		    hitList.add(hit);
-		}
-		MCParticle maxParticle = null;
-		for (MCParticle part : clusterTruth.keySet()) {
-		    if (maxParticle == null || clusterTruth.get(maxParticle).size() < clusterTruth.get(part).size()) {
-			maxParticle = part;
-		    }
-		}
-		printme += " (dominant particle: "+maxParticle.getType().getPDGID()+" with "+clusterTruth.get(maxParticle).size()+" hits)";
-
-		LocalHelixExtrapolationTrackClusterMatcher extrap = new LocalHelixExtrapolationTrackClusterMatcher();
-		extrap.process(m_event);
-		Hep3Vector point = extrap.performExtrapolation(tr);
-		printme += " -- extrap point is ";
-		if (point == null) {
-		    printme += "null";
-		} else {
-		    double x = point.x();
-		    double y = point.y();
-		    double z = point.z();
-		    double r = Math.sqrt(x*x + y*y);
-		    printme += "r="+r+", z="+z;
-		}
-		
-		// Check in case incorrect track match
-		if (maxParticle != mcTruth) {
-		    printme += " -- MISTAKE in track-cluster matching!";
-		    if (point != null) {
-			printme += " Nearest correct hit is: ";
-			double minDistSq = -1.0;
-			HitMap hitsEcal = ((HitMap)(m_event.get("inputHitMapEcal")));
-			for (CalorimeterHit hit : hitsEcal.values()) {
-			    boolean truthMatch = false;
-			    SimCalorimeterHit simhit = ((SimCalorimeterHit)(hit));
-			    for (int i=0; i<simhit.getMCParticleCount(); i++) {
-				if (simhit.getMCParticle(i) == mcTruth) {
-				    truthMatch = true;
-				    break;
-				}
-			    }
-			    if (truthMatch) {
-				double[] hitPos = hit.getPosition();
-				double distSq = (hitPos[0]-point.x())*(hitPos[0]-point.x()) + (hitPos[1]-point.y())*(hitPos[1]-point.y()) + (hitPos[2]-point.z())*(hitPos[2]-point.z());
-				if (distSq < minDistSq || minDistSq < 0.0) { minDistSq = distSq; }
-			    }
-			}
-			printme += Math.sqrt(minDistSq)+" away from point (vs "+proximity(clus,point)+" for found cluster). By category, nearby correct clusters are:";
-			double window = 50.0; // somewhat arbitrary
-			for (Cluster tmpclus : mips) { if (proximity(tmpclus,point) < window) { printme += "\n  MIP["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;"; if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
-			for (Cluster tmpclus : clumps) { if (proximity(tmpclus,point) < window) { printme += "\n  Clump["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
-			for (Cluster tmpclus : largeClustersWithoutSkeletons) { if (proximity(tmpclus,point) < window) { printme += "\n  Large["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
-			for (Cluster tmpclus : smallClustersWithoutSkeletons) { if (proximity(tmpclus,point) < window) { printme += "\n  Small["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
-			for (Cluster tmpclus : wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons) { if (proximity(tmpclus,point) < window) { printme += "\n  HitFromLarge["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
-		    }
-		}
-	
-		System.out.println(printme);
-	    }
+	    debugPrintUnmatchedTrackInfo(unmatchedTracks, mips, clumps, photonClusters, largeClustersWithoutSkeletons, smallClustersWithoutSkeletons, largeClusters, skeletonClusters, unusedHits);
+	    debugPrintMatchedTrackInfo(tracksMatchedToClusters, mips, clumps, photonClusters, largeClustersWithoutSkeletons, smallClustersWithoutSkeletons, wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons);
 	}
 
 	// OK. Now pick out those clusters that have something connected to them...
@@ -650,16 +500,18 @@
 	    newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
 	    long timeAtStartOfEventTracksIteration= Calendar.getInstance().getTimeInMillis(); // DEBUG
 	    for (Track tr : tracksSortedByMomentum) {
-		Set<Cluster> vetoedAdditions = new HashSet<Cluster>();
+		Set<Cluster> vetoedAdditionsDueToEoverP = new HashSet<Cluster>();
+		Set<Cluster> vetoedAdditionsDueToTrackSeed = new HashSet<Cluster>();
+		Set<Cluster> vetoedTrackSeeds = new HashSet<Cluster>();
 		long timeAtStartOfTrackIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
-		Set<Cluster> showerComponents = iterateOnOneTrack(tr, tracksMatchedToClusters, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditions);
+		Set<Cluster> showerComponents = iterateOnOneTrack(tr, tracksMatchedToClusters, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditionsDueToEoverP, vetoedAdditionsDueToTrackSeed, vetoedTrackSeeds);
 		long timeAtEndOfTrackIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
 		if (m_debugTiming) { System.out.println("DEBUG: iterateOnOneTrack() took "+(timeAtEndOfTrackIteration-timeAtStartOfTrackIteration)+" ms."); }
 		newMapTrackToShowerComponents.put(tr, showerComponents);
 		for (Cluster clus : showerComponents) {
 		    newMapShowerComponentToTrack.put(clus, tr);
 		}
-		newMapTrackToVetoedAdditions.put(tr, vetoedAdditions);
+		newMapTrackToVetoedAdditions.put(tr, vetoedAdditionsDueToEoverP);
 	    }
 	    long timeAtEndOfEventTracksIteration= Calendar.getInstance().getTimeInMillis(); // DEBUG
 	    if (m_debugTiming) { System.out.println("DEBUG: In event-level iteration "+iIter+", looping over "+tracksSortedByMomentum.size()+" tracks took "+(timeAtEndOfEventTracksIteration-timeAtStartOfEventTracksIteration)+" ms (plus "+(timeAtStartOfEventTracksIteration-timeAtStartOfEventIteration)+" ms prep time)"); }
@@ -1053,6 +905,67 @@
 	m_event = null;
     }
 
+    protected boolean photonNotAtInnerSurface(Cluster photon) {
+	// This is a little fiddly... remember that a small layer number doesn't necessarily
+	// mean that the hit is close to the inner surface. Instead, compute the distance into
+	// the calorimeter along the line projecting back to the IP.
+
+	// What is the angle (cosTheta) of the corner?
+	double corner_z = m_ECAL_endcap_z;
+	double corner_r = m_ECAL_barrel_r;
+	double cosThetaOfCorner = corner_z / Math.sqrt(corner_r*corner_r + corner_z*corner_z);
+
+	double minDistanceToSurface = 0.0;
+	boolean firstHit = true;
+	for (CalorimeterHit hit : photon.getCalorimeterHits()) {
+	    // How far is this hit from the ECAL inner surface?
+	    Hep3Vector hitpos = new BasicHep3Vector(hit.getPosition());
+
+	    // Does our hit project back to the barrel or to the endcap?
+	    double hitCosTheta = hitpos.z() / hitpos.magnitude();
+	    boolean hitUsesBarrel = ( Math.abs(hitCosTheta) < Math.abs(cosThetaOfCorner) );
+
+	    double distanceOfThisHit = 0.0;
+	    double hit_r = Math.sqrt(hitpos.x()*hitpos.x() + hitpos.y()*hitpos.y());
+	    if (hitUsesBarrel) {
+		double projectedPointOnSurface_z = (m_ECAL_barrel_r / hit_r) * hitpos.z();
+		double dr = hit_r - m_ECAL_barrel_r;
+		double dz = hitpos.z() - projectedPointOnSurface_z;
+		distanceOfThisHit = Math.sqrt(dr*dr + dz*dz);
+	    } else {
+		double projectedPointOnSurface_r = (m_ECAL_endcap_z / Math.abs(hitpos.z())) * hit_r;
+		double dr = hit_r - projectedPointOnSurface_r;
+		double dz = Math.abs(hitpos.z()) - m_ECAL_endcap_z;
+		distanceOfThisHit = Math.sqrt(dr*dr + dz*dz);
+	    }
+
+	    if (distanceOfThisHit < minDistanceToSurface || firstHit) {
+		minDistanceToSurface = distanceOfThisHit; firstHit = false;
+	    }
+	}
+
+	return (minDistanceToSurface > 40.0); // 4cm
+    }
+
+    protected void initPotentialLinks_PhotonMip(Collection<Cluster> photons, Collection<Cluster> mips, double scaleFactorTrackToClump) {
+	List<Cluster> photonsNotAtInnerSurface = new Vector<Cluster>();
+	for (Cluster photon : photons) {
+	    if (photonNotAtInnerSurface(photon)) {
+		photonsNotAtInnerSurface.add(photon);
+	    }
+	}
+	initPotentialLinks_MipClump(mips, photonsNotAtInnerSurface, scaleFactorTrackToClump, false); // FIXME: Don't hard-code
+    }
+    protected void initPotentialLinks_PhotonMisc(Collection<Cluster> photons, Collection<Cluster> clusters) {
+	List<Cluster> photonsNotAtInnerSurface = new Vector<Cluster>();
+	for (Cluster photon : photons) {
+	    if (photonNotAtInnerSurface(photon)) {
+		photonsNotAtInnerSurface.add(photon);
+	    }
+	}
+	initPotentialLinks_MiscMisc(photonsNotAtInnerSurface, clusters, 50.0, "photons", "???"); // FIXME: Don't hard-code
+    }
+
     protected void initPotentialLinks_cheating(Collection<Cluster> clusters, Map<Cluster,List<Track>> clustersMatchedToTracks) {
 	Cluster[] tmpArray = new Cluster[1];
 	Cluster[] clusterArray = clusters.toArray(tmpArray);
@@ -1120,7 +1033,7 @@
 	}
     }
 
-    protected void initPotentialLinks_MipClump(List<Cluster> mips, List<Cluster> clumps, double scaleFactorTrackToClump, boolean applyPenaltyFactor)
+    protected void initPotentialLinks_MipClump(Collection<Cluster> mips, Collection<Cluster> clumps, double scaleFactorTrackToClump, boolean applyPenaltyFactor)
     {
 	for (Cluster mip : mips) {
 	    for (Cluster clump : clumps) {
@@ -1145,7 +1058,7 @@
 	}
     }
 
-    protected void initPotentialLinks_MipMisc(List<Cluster> mips, List<Cluster> miscClusters, double thresholdForProximity, String typeName)
+    protected void initPotentialLinks_MipMisc(Collection<Cluster> mips, Collection<Cluster> miscClusters, double thresholdForProximity, String typeName)
     {
 	for (Cluster mip : mips) {
 	    for (Cluster clus : miscClusters) {
@@ -1171,7 +1084,7 @@
 	}
     }
 
-    protected void initPotentialLinks_MiscMisc(List<Cluster> list1, List<Cluster> list2, double thresholdForProximity, String type1, String type2)
+    protected void initPotentialLinks_MiscMisc(Collection<Cluster> list1, Collection<Cluster> list2, double thresholdForProximity, String type1, String type2)
     {
 	for (Cluster clus1 : list1) {
 	    if (list2.contains(clus1)) { throw new AssertionError("Book-keeping error"); }
@@ -1213,14 +1126,17 @@
     protected void initPotentialLinks_SkeletonsWithinLargeClus(List<Cluster> largeClusters, List<Cluster> mips, List<Cluster> clumps, double  scaleFactorTrackToTrack, double scaleFactorTrackToClump)
     {
 	for (Cluster largeClus : largeClusters) {
-	    List<Cluster> subClustersMips = new Vector<Cluster>();
-	    List<Cluster> subClustersClumps = new Vector<Cluster>();
-	    for (Cluster subClus : largeClus.getClusters()) {
-		for (Cluster subsubClus : subClus.getClusters()) {
-		    if (mips.contains(subsubClus)) { subClustersMips.add(subsubClus); }
-		    if (clumps.contains(subsubClus)) { subClustersClumps.add(subsubClus); }
-		}		
-	    }
+// 	    List<Cluster> subClustersMips = new Vector<Cluster>();
+// 	    List<Cluster> subClustersClumps = new Vector<Cluster>();
+// 	    for (Cluster subClus : largeClus.getClusters()) {
+// 		for (Cluster subsubClus : subClus.getClusters()) {
+// 		    if (mips.contains(subsubClus)) { subClustersMips.add(subsubClus); }
+// 		    if (clumps.contains(subsubClus)) { subClustersClumps.add(subsubClus); }
+// 		}		
+// 	    }
+	    List<Cluster> subClustersMips = componentsInLargeCluster(largeClus, mips);
+	    List<Cluster> subClustersClumps = componentsInLargeCluster(largeClus, clumps);
+
 	    if (m_debugLinkScores) { System.out.println("DEBUG: Large cluster with "+largeClus.getCalorimeterHits().size()+" hits contains "+subClustersMips.size()+" MIPs and "+subClustersClumps.size()+" clumps."); }
 	    // Potential mip-mip links
 	    initPotentialLinks_MipMip(subClustersMips, scaleFactorTrackToTrack, false);
@@ -1693,7 +1609,7 @@
 	return dominant;
     }
 
-    Set<CalorimeterHit> findHitsFromTruth_P(List<MCParticle> partList, Collection<CalorimeterHit> hits) {
+    protected Set<CalorimeterHit> findHitsFromTruth_P(List<MCParticle> partList, Collection<CalorimeterHit> hits) {
 	Set<CalorimeterHit> outputHits = new HashSet<CalorimeterHit>();
 	for (MCParticle part : partList) {
 	    List<CalorimeterHit> foundHits = findHitsFromTruth_P(part, hits);
@@ -1701,7 +1617,7 @@
 	}
 	return outputHits;
     }
-    Set<CalorimeterHit> findHitsFromTruth_T(List<Track> trackList, Collection<CalorimeterHit> hits) {
+    protected Set<CalorimeterHit> findHitsFromTruth_T(List<Track> trackList, Collection<CalorimeterHit> hits) {
 	Set<CalorimeterHit> outputHits = new HashSet<CalorimeterHit>();
 	for (Track tr : trackList) {
 	    List<CalorimeterHit> foundHits = findHitsFromTruth_T(tr, hits);
@@ -1710,7 +1626,7 @@
 	return outputHits;
     }
 
-    List<MCParticle> getMCParticle(Track tr) {
+    protected List<MCParticle> getMCParticle(Track tr) {
 	List<MCParticle> output = new Vector<MCParticle>();
 	if (tr instanceof ReconTrack) {
 	    output.add((MCParticle)(((ReconTrack)(tr)).getMCParticle()));
@@ -1732,7 +1648,7 @@
 	}
 	return output;
     }
-    List<CalorimeterHit> findHitsFromTruth_T(Track tr, Collection<CalorimeterHit> hits) {
+    protected List<CalorimeterHit> findHitsFromTruth_T(Track tr, Collection<CalorimeterHit> hits) {
 	List<CalorimeterHit> outputHits = new Vector<CalorimeterHit>();
 	List<MCParticle> contributingParticles = getMCParticle(tr);
 	for (MCParticle part : contributingParticles) {
@@ -1741,7 +1657,7 @@
 	}
 	return outputHits;
     }
-    List<CalorimeterHit> findHitsFromTruth_P(MCParticle part, Collection<CalorimeterHit> hitsToSearch) {
+    protected List<CalorimeterHit> findHitsFromTruth_P(MCParticle part, Collection<CalorimeterHit> hitsToSearch) {
 	List<MCParticle> mcList = m_event.get(MCParticle.class, m_mcList);
 	List<MCParticle> truthParents = findParentsInList(part, mcList);
 	List<CalorimeterHit> truthHits = new Vector<CalorimeterHit>();
@@ -1797,6 +1713,36 @@
         return outputList;
     }   
 
+    protected List<Cluster> componentsInLargeCluster(Cluster largeClus, List<Cluster> subClusters) {
+	List<Cluster> foundSubClusters = new Vector<Cluster>(); // the output
+	Set<Long> hitsInLargeCluster = new HashSet<Long>();
+	for (CalorimeterHit hit : largeClus.getCalorimeterHits()) {
+	    hitsInLargeCluster.add(hit.getCellID());
+	}
+	for (Cluster clus : subClusters) {
+	    boolean foundAny = false;
+	    boolean missedAny = false;
+	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		long cellID = hit.getCellID();
+		if (hitsInLargeCluster.contains(cellID)) {
+		    foundAny = true;
+		} else {
+		    missedAny = true;
+		}
+	    }
+	    if (foundAny && !missedAny) {
+		// Exact match
+		foundSubClusters.add(clus);
+	    } else if (!foundAny && missedAny) {
+		// No match
+		continue;
+	    } else {
+		// Ambiguity
+		throw new AssertionError("Ambiguity in subcluster match");
+	    }
+	}
+	return foundSubClusters;	
+    }
 
     TrackClusterMatcher m_trackClusterMatcher;
     
@@ -1888,7 +1834,7 @@
 	    }
 	}
     }
-    private Set<Cluster> probeFullPropagation(Set<Cluster> allShowerComponents, ScoredLink firstLink, Cluster miniSeed) {
+    private Set<Cluster> probeFullPropagation(Set<Cluster> allShowerComponents, ScoredLink firstLink, Cluster miniSeed, Set<Cluster> unavailableClusters) {
 	Set<Cluster> output = new HashSet<Cluster>();
 	Cluster counterpart = firstLink.counterpart(miniSeed);
 	double threshold = firstLink.score();
@@ -1914,8 +1860,12 @@
 			Cluster potentialNewCluster = testLink.counterpart(clus);
 			if (potentialNewCluster == null) { throw new AssertionError("Null counterpart!"); }
 			if (!allShowerComponents.contains(potentialNewCluster) && !output.contains(potentialNewCluster) && !clustersToAddThisPass.contains(potentialNewCluster)) {
-			    // We haven't used this one yet -- should add it.
-			    clustersToAddThisPass.add(potentialNewCluster);
+			    // We haven't used this one yet -- should add it if it's available
+			    if (unavailableClusters.contains(potentialNewCluster)) {
+				if (m_debug) { System.out.println("DEBUG: Skipping cluster because it's already been used"); }
+			    } else {
+				clustersToAddThisPass.add(potentialNewCluster);
+			    }
 			}
 		    }
 		}
@@ -2170,7 +2120,7 @@
 	return score;
     }
 
-    protected Set<Cluster> iterateOnOneTrack(Track tr, Map<Track,Cluster> tracksMatchedToClusters, List<SharedClusterGroup> allSharedClusters, double threshold, double tolerance, Map<Cluster, Track> newMapShowerComponentToTrack, Set<Cluster> vetoedAdditions) 
+    protected Set<Cluster> iterateOnOneTrack(Track tr, Map<Track,Cluster> tracksMatchedToClusters, List<SharedClusterGroup> allSharedClusters, double threshold, double tolerance, Map<Cluster, Track> newMapShowerComponentToTrack, Set<Cluster> vetoedAdditionsDueToEoverP, Set<Cluster> vetoedAdditionsDueToTrackSeed, Set<Cluster> vetoedTrackSeeds) 
     {
 	// Set seeds aside to make sure we don't add some other
 	// track's seed to our cluster.
@@ -2188,6 +2138,7 @@
 	List<Cluster> tier0 = new Vector<Cluster>();
 	tier0.add(seed);
 	tieredShowerComponents.put(0, tier0);
+	Map<Cluster,Cluster> localMapVetoedClusterToAssociatedSeed = new HashMap<Cluster,Cluster>();
 	for (int iTier=0; iTier<25; iTier++) { // 25 is arbitrary cap in case we get stuck.
 	    long timeAtStartOfThisTierLoop = Calendar.getInstance().getTimeInMillis(); // DEBUG
 	    List<Cluster> componentsInPreviousTier = tieredShowerComponents.get(iTier);
@@ -2223,19 +2174,21 @@
 		    	if (testNotAlreadyInBaseShower && testNotAlreadyInNextTier && testNotAlreadyAssigned && testNotSeed) {
 			    // E/p check with full propagation test
 			    long timeBeforeProbeFullPropagation = Calendar.getInstance().getTimeInMillis(); // DEBUG
-			    Set<Cluster> impliedAdditionalClusters = probeFullPropagation(allShowerComponents, link, miniSeed);
+			    Set<Cluster> impliedAdditionalClusters = probeFullPropagation(allShowerComponents, link, miniSeed, newMapShowerComponentToTrack.keySet());
 			    // Check that full-propagation set doesn't contain any other seeds (that would be a sign that
 			    // something went badly wrong!)
 			    boolean impliedAdditionalClustersContainsIllegalSeed = false;
 			    for (Cluster impliedClus : impliedAdditionalClusters) {
 			    	if (allSeedsInEvent.contains(impliedClus) && impliedClus != seed) {
 				    impliedAdditionalClustersContainsIllegalSeed = true;
-				    break;
+				    //vetoedTrackSeeds.add(impliedClus);
+				    localMapVetoedClusterToAssociatedSeed.put(newClus,impliedClus);
 			    	}
 			    }
 			    long timeAfterProbeFullPropagation = Calendar.getInstance().getTimeInMillis(); // DEBUG
 			    if (m_debugTiming) { System.out.println("DEBUG: probeFullPropagation() took "+(timeAfterProbeFullPropagation-timeBeforeProbeFullPropagation)+" ms"); }
 			    if (impliedAdditionalClustersContainsIllegalSeed) {
+				//vetoedAdditionsDueToTrackSeed.add(newClus);
 				if (m_debug) { System.out.println("DEBUG: Link with score="+link.score()+" to a subcluster with "+newClus.getCalorimeterHits().size()+" hits implied adding a total of "+impliedAdditionalClusters.size()+" clusters -- rejected because this would have picked up another track's seed."); }
 			    } else {
 			    	// OK
@@ -2253,6 +2206,15 @@
 			    	if (testValidEoverP) {
 				    // OK -- add 'em
 				    componentsInNextTier.addAll(impliedAdditionalClusters);
+				    {
+					// MAT DEBUG
+					for (Cluster impliedClus : impliedAdditionalClusters) {
+					    Track impliedTrack = newMapShowerComponentToTrack.get(impliedClus);
+					    if (impliedTrack != null) {
+						throw new AssertionError("Reassignment of cluster with "+impliedClus.getCalorimeterHits().size()+" hits... old track has p="+(new BasicHep3Vector(impliedTrack.getMomentum())).magnitude()+" and new track has p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
+					    }
+					}
+				    }
 				    if (m_debugLinkScores) { 
 				    	String printme = "DEBUG: Link with score="+link.score()+" to a subcluster with "+newClus.getCalorimeterHits().size()+" hits implied adding a total of "+impliedAdditionalClusters.size()+" clusters for a new running total of "+energyOfExistingShowerPlusAdditionalClusters+". Implicitly added:";
 				    	for (Cluster impliedClus : impliedAdditionalClusters) {
@@ -2264,7 +2226,7 @@
 			    	} else {
 				    // Nope -- failed
 				    if (m_debugEoverP) { System.out.println("DEBUG: Link with score="+link.score()+" to a subcluster with "+newClus.getCalorimeterHits().size()+" hits implied adding a total of "+impliedAdditionalClusters.size()+" clusters -- rejected because new running total would be "+energyOfExistingShowerPlusAdditionalClusters); }
-				    vetoedAdditions.add(newClus);
+				    vetoedAdditionsDueToEoverP.add(newClus);
 			    	}
 			    }
 			}
@@ -2290,6 +2252,17 @@
 		break;
 	    }
 	}
+
+	// Done all iteration passes.
+	for (Cluster vetoedClus : localMapVetoedClusterToAssociatedSeed.keySet()) {
+	    if (allShowerComponents.contains(vetoedClus)) {
+		// We added it after all --> ignore
+	    } else {
+		// We never did add it --> list it
+		vetoedAdditionsDueToTrackSeed.add(vetoedClus);
+		vetoedTrackSeeds.add(localMapVetoedClusterToAssociatedSeed.get(vetoedClus));
+	    }
+	}
 	
 	return allShowerComponents;
     }
@@ -2594,5 +2567,185 @@
 	    return psum;
 	}
     }
+
+    protected double m_ECAL_barrel_zmin;
+    protected double m_ECAL_barrel_zmax;
+    protected double m_ECAL_barrel_r;
+    protected double m_ECAL_endcap_z;
+    protected double m_ECAL_endcap_rmin;
+    protected double m_ECAL_endcap_rmax;
+    protected double[] m_fieldStrength;
+    protected void initGeometry(EventHeader event) {
+        Detector det = event.getDetector();
+        CylindricalCalorimeter emb = ((CylindricalCalorimeter) det.getSubdetectors().get("EMBarrel"));
+        CylindricalCalorimeter eme = ((CylindricalCalorimeter) det.getSubdetectors().get("EMEndcap"));
+        m_ECAL_barrel_zmin = emb.getZMin();
+        m_ECAL_barrel_zmax = emb.getZMax();
+        m_ECAL_barrel_r = emb.getLayering().getDistanceToLayerSensorMid(0);
+        m_ECAL_endcap_z = eme.getLayering().getDistanceToLayerSensorMid(0);
+        m_ECAL_endcap_rmin = eme.getInnerRadius();
+        m_ECAL_endcap_rmax = eme.getOuterRadius();
+        double[] zero = {0, 0, 0};
+        m_fieldStrength = det.getFieldMap().getField(zero);
+    }
+
+    protected void debugPrintUnmatchedTrackInfo(List<Track> unmatchedTracks, Collection<Cluster> mips, Collection<Cluster> clumps, Collection<Cluster> photonClusters, Collection<Cluster> largeClustersWithoutSkeletons, Collection<Cluster> smallClustersWithoutSkeletons, Collection<Cluster> largeClusters, Collection<Cluster> skeletonClusters, HitMap unusedHits) {
+	System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
+	for (Track tr : unmatchedTracks) {
+	    Hep3Vector p = new BasicHep3Vector(tr.getMomentum());
+	    double px = p.x();
+	    double py = p.y();
+	    double pz = p.z();
+	    double pmag = p.magnitude();
+	    double pt = Math.sqrt(px*px + py*py);
+	    double cosTheta = pz/pmag;
+	    String printme = new String("Track with p="+p.magnitude()+" (cosTheta="+cosTheta+")");
+	    LocalHelixExtrapolationTrackClusterMatcher extrap = new LocalHelixExtrapolationTrackClusterMatcher();
+	    extrap.process(m_event);
+	    Hep3Vector point = extrap.performExtrapolation(tr);
+	    if (point == null) {
+		printme += " -- extrapolation point is null";
+	    } else {
+		double x = point.x();
+		double y = point.y();
+		double z = point.z();
+		double r = Math.sqrt(x*x + y*y);
+		printme += " -- extrapolation point at x="+point.x()+", y="+point.y()+", z="+point.z()+", r="+r;
+	    }
+	    System.out.println(printme);
+	    if (point != null) {
+		System.out.println("Dumping ECAL hits from this particle:");
+		MCParticle trackTruth = ((BaseTrackMC)(tr)).getMCParticle();
+		List<CalorimeterHit> EcalBarrDigiHits = m_event.get(CalorimeterHit.class, "EcalBarrDigiHits");
+		List<CalorimeterHit> EcalEndcapDigiHits  = m_event.get(CalorimeterHit.class, "EcalEndcapDigiHits");
+		List<SimCalorimeterHit> EcalHits = new Vector<SimCalorimeterHit>();
+		for (CalorimeterHit hit : EcalBarrDigiHits) { EcalHits.add((SimCalorimeterHit)(hit)); }
+		for (CalorimeterHit hit : EcalEndcapDigiHits) { EcalHits.add((SimCalorimeterHit)(hit)); }
+		for (SimCalorimeterHit hit : EcalHits) {
+		    int mcCount = hit.getMCParticleCount();
+		    boolean matchedTruth = false;
+		    for (int i=0; i<mcCount; i++) {
+			MCParticle mc = hit.getMCParticle(i);
+			if (mc == trackTruth) {
+			    matchedTruth = true;
+			    break;
+			}
+		    }
+		    Hep3Vector hitPos = new BasicHep3Vector(hit.getPosition());
+		    Hep3Vector displacement = VecOp.sub(hitPos, point);
+		    double r = Math.sqrt(hitPos.x()*hitPos.x() + hitPos.y()*hitPos.y());
+		    double separation = displacement.magnitude();
+		    if (separation < 50.0) {
+			String collection = "Collection=";
+			for (Cluster clus : mips) { if (clus.getCalorimeterHits().contains(hit)) { collection += "MIPs,"; break; } }
+			for (Cluster clus : clumps) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Clumps,"; break; } }
+			for (Cluster clus : photonClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Photons,"; break; } }
+			for (Cluster clus : largeClustersWithoutSkeletons) { if (clus.getCalorimeterHits().contains(hit)) { collection += "LargeClusWithoutSkel,"; break; } }
+			for (Cluster clus : smallClustersWithoutSkeletons) { if (clus.getCalorimeterHits().contains(hit)) { collection += "SmallClusWithoutSkel,"; break; } }
+			if (unusedHits.values().contains(hit)) { collection += "UnusedHits,"; }
+			for (Cluster clus : largeClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "LargeClus,"; break; } }
+			for (Cluster clus : skeletonClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Skel,"; break; } }
+			System.out.println("   hit at r="+r+", z="+hitPos.z()+", x="+hitPos.x()+", y="+hitPos.y()+" with separation="+displacement.magnitude()+" -- in list "+collection);
+		    }
+		}
+	    }
+	}	    
+    }
+
+    protected void debugPrintMatchedTrackInfo(Map<Track,Cluster> tracksMatchedToClusters, Collection<Cluster> mips, Collection<Cluster> clumps, Collection<Cluster> photonClusters, Collection<Cluster> largeClustersWithoutSkeletons, Collection<Cluster> smallClustersWithoutSkeletons, List<Cluster> wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons) {
+	for (Track tr : tracksMatchedToClusters.keySet()) {
+	    Cluster clus = tracksMatchedToClusters.get(tr);
+	    double[] p = tr.getMomentum();
+	    Particle mcTruth = null;
+	    if (tr instanceof ReconTrack) {
+		mcTruth = ((ReconTrack)(tr)).getMCParticle();
+	    }
+	    if (tr instanceof BaseTrackMC) {
+		mcTruth = ((BaseTrackMC)(tr)).getMCParticle();
+	    }
+	    double mom = Math.sqrt(p[0]*p[0] + p[1]*p[1] + p[2]*p[2]);
+	    // Which list is that from?
+	    String printme = new String("Matched track with p=");
+	    printme += mom;
+	    if (mcTruth != null) {
+		printme += " (truth type "+mcTruth.getType()+")";
+	    }
+	    printme += " to a cluster with ";
+	    printme += clus.getCalorimeterHits().size();
+	    printme += " hits and energy of ";
+	    printme += clus.getEnergy();
+	    if (mips.contains(clus)) { printme += " [from MIPS]"; }
+	    if (clumps.contains(clus)) { printme += " [from CLUMPS]"; }
+	    if (largeClustersWithoutSkeletons.contains(clus)) { printme += " [from LARGE CLUSTERS W/O SKELETONS]"; }
+	    if (photonClusters.contains(clus)) { printme += " [from PHOTONS]"; }
+	    if (smallClustersWithoutSkeletons.contains(clus)) { printme += " [from SMALL CLUSTERS]"; }
+	    Map<MCParticle, List<CalorimeterHit>> clusterTruth = new HashMap<MCParticle, List<CalorimeterHit>>();
+	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		SimCalorimeterHit simHit = (SimCalorimeterHit)(hit);
+		MCParticle mcTruthPart = simHit.getMCParticle(0);
+		List<CalorimeterHit> hitList = clusterTruth.get(mcTruthPart);
+		if (hitList == null) {
+		    hitList = new Vector<CalorimeterHit>();
+		    clusterTruth.put(mcTruthPart, hitList);
+		}
+		hitList.add(hit);
+	    }
+	    MCParticle maxParticle = null;
+	    for (MCParticle part : clusterTruth.keySet()) {
+		if (maxParticle == null || clusterTruth.get(maxParticle).size() < clusterTruth.get(part).size()) {
+		    maxParticle = part;
+		}
+	    }
+	    printme += " (dominant particle: "+maxParticle.getType().getPDGID()+" with "+clusterTruth.get(maxParticle).size()+" hits)";
+	    
+	    LocalHelixExtrapolationTrackClusterMatcher extrap = new LocalHelixExtrapolationTrackClusterMatcher();
+	    extrap.process(m_event);
+	    Hep3Vector point = extrap.performExtrapolation(tr);
+	    printme += " -- extrap point is ";
+	    if (point == null) {
+		printme += "null";
+	    } else {
+		double x = point.x();
+		double y = point.y();
+		double z = point.z();
+		double r = Math.sqrt(x*x + y*y);
+		printme += "r="+r+", z="+z;
+	    }
+	    
+	    // Check in case incorrect track match
+	    if (maxParticle != mcTruth) {
+		printme += " -- MISTAKE in track-cluster matching!";
+		if (point != null) {
+		    printme += " Nearest correct hit is: ";
+		    double minDistSq = -1.0;
+		    HitMap hitsEcal = ((HitMap)(m_event.get("inputHitMapEcal")));
+		    for (CalorimeterHit hit : hitsEcal.values()) {
+			boolean truthMatch = false;
+			SimCalorimeterHit simhit = ((SimCalorimeterHit)(hit));
+			for (int i=0; i<simhit.getMCParticleCount(); i++) {
+			    if (simhit.getMCParticle(i) == mcTruth) {
+				truthMatch = true;
+				break;
+			    }
+			}
+			if (truthMatch) {
+			    double[] hitPos = hit.getPosition();
+			    double distSq = (hitPos[0]-point.x())*(hitPos[0]-point.x()) + (hitPos[1]-point.y())*(hitPos[1]-point.y()) + (hitPos[2]-point.z())*(hitPos[2]-point.z());
+			    if (distSq < minDistSq || minDistSq < 0.0) { minDistSq = distSq; }
+			}
+		    }
+		    printme += Math.sqrt(minDistSq)+" away from point (vs "+proximity(clus,point)+" for found cluster). By category, nearby correct clusters are:";
+		    double window = 50.0; // somewhat arbitrary
+		    for (Cluster tmpclus : mips) { if (proximity(tmpclus,point) < window) { printme += "\n  MIP["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;"; if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
+		    for (Cluster tmpclus : clumps) { if (proximity(tmpclus,point) < window) { printme += "\n  Clump["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
+		    for (Cluster tmpclus : largeClustersWithoutSkeletons) { if (proximity(tmpclus,point) < window) { printme += "\n  Large["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
+		    for (Cluster tmpclus : smallClustersWithoutSkeletons) { if (proximity(tmpclus,point) < window) { printme += "\n  Small["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
+		    for (Cluster tmpclus : wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons) { if (proximity(tmpclus,point) < window) { printme += "\n  HitFromLarge["+tmpclus.getCalorimeterHits().size()+"] at "+proximity(tmpclus,point)+" ;";  if (quoteDominantParticle(tmpclus)==mcTruth) { printme += " (TM)"; } else { printme += " (not TM)"; } } }
+		}
+	    }
+	    
+	    System.out.println(printme);
+	}
+    }
 }
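
For reference, the geometry behind the new photonNotAtInnerSurface() helper above: each hit is projected back toward the interaction point onto the ECAL inner surface (the barrel cylinder or the endcap plane, whichever the IP-to-hit line crosses), and the photon counts as "not at the inner surface" if even its closest hit lies more than 4 cm beyond that surface. Here is a standalone sketch of that distance calculation, with the barrel radius and endcap plane passed in by hand (illustrative SiD-like numbers in the example) instead of being read from the detector description as initGeometry() does.

// Illustrative only: barrel radius and endcap plane are hypothetical arguments here;
// the driver reads them from the detector geometry in initGeometry().
class EcalSurfaceDistanceSketch {

    // Distance from a hit (x, y, z) to the ECAL inner surface, measured along the
    // straight line from the interaction point through the hit.
    static double distanceToInnerSurface(double x, double y, double z,
                                         double barrelR, double endcapZ) {
        double r = Math.sqrt(x * x + y * y);
        double cosTheta = z / Math.sqrt(r * r + z * z);
        // Corner direction: lines from the IP with smaller |cosTheta| cross the
        // barrel cylinder first; larger |cosTheta| cross the endcap plane first.
        double cosThetaCorner = endcapZ / Math.sqrt(barrelR * barrelR + endcapZ * endcapZ);
        if (Math.abs(cosTheta) < cosThetaCorner) {
            // Barrel: scale the hit back to radius barrelR along the IP line.
            double zOnSurface = (barrelR / r) * z;
            double dr = r - barrelR;
            double dz = z - zOnSurface;
            return Math.sqrt(dr * dr + dz * dz);
        } else {
            // Endcap: scale the hit back to the plane |z| = endcapZ.
            double absZ = Math.abs(z);
            double rOnSurface = (endcapZ / absZ) * r;
            double dr = r - rOnSurface;
            double dz = absZ - endcapZ;
            return Math.sqrt(dr * dr + dz * dz);
        }
    }

    public static void main(String[] args) {
        // SiD-like numbers, illustrative only: barrel R = 1270 mm, endcap at |z| = 1680 mm.
        double d = distanceToInnerSurface(1330.0, 0.0, 200.0, 1270.0, 1680.0);
        System.out.println("distance = " + d + " mm -> " + (d > 40.0 ? "buried" : "at inner surface"));
    }
}

The 4 cm threshold in the example mirrors the hard-coded 40.0 mm cut in the method above.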
 

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.5 -> 1.6
diff -u -r1.5 -r1.6
--- ReclusterDTreeDriver.java	20 Dec 2007 04:07:05 -0000	1.5
+++ ReclusterDTreeDriver.java	4 Jan 2008 19:20:27 -0000	1.6
@@ -232,6 +232,10 @@
 	}
 	Collections.sort(tracksSortedByMomentum, new MomentumSort());
 
+	if (m_debug) {
+	    debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracks, seeds);
+	}
+
 	// Prep for linking
 	List<Cluster> linkableClusters = new Vector<Cluster>();
 	List<Cluster> smallClustersToShare = new Vector<Cluster>();
@@ -273,6 +277,15 @@
 	    initPotentialLinks_MiscMisc(seedLeftoverHitClusters, treesWithNoStructure, thresholdForProximity, "SmallSeed", "LargeStructurelessTree");
 	    initPotentialLinks_MiscMisc(clumps, treesWithNoStructure, thresholdForProximity, "Clump", "LargeStructurelessTree");
 	    initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
+
+	    // Look for specific case where "photon" candidate is actually part of a hadronic shower:
+	    //   * photon ID passed
+	    //   * photon is not linked directly to the track
+	    //       -> it is not at the inner surface
+	    //       -> it is several layers in
+	    //initPotentialLinks_PhotonMip(nonSeedPhotonClusters, mips, 0.8); // put in a penalty factor
+            //initPotentialLinks_PhotonMisc(nonSeedPhotonClusters, clumps);
+            //initPotentialLinks_PhotonMisc(nonSeedPhotonClusters, treesWithNoStructure);
 	}
 
 	// Done making links. Prep & build skeletons:
@@ -307,14 +320,61 @@
 	    newMapShowerComponentToTrack = new HashMap<Cluster, Track>();
 	    newMapTrackToShowerComponents = new HashMap<Track, Set<Cluster>>();
 	    newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
+	    // Pop the seeds in first:
 	    for (Track tr : tracksSortedByMomentum) {
-		Set<Cluster> vetoedAdditions = new HashSet<Cluster>();
-		Set<Cluster> showerComponents = iterateOnOneTrack(tr, tweakedTracksMatchedToClusters, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditions);
+		Cluster seed = tweakedTracksMatchedToClusters.get(tr);
+		newMapShowerComponentToTrack.put(seed, tr);
+	    }
+	    // Build clusters:
+	    Map<Track, Set<Cluster>> mapTrackToVetoedTrackSeeds = new HashMap<Track, Set<Cluster>>();
+	    Map<Track, Set<Cluster>> mapTrackToVetoedAdditionsDueToTrackSeed = new HashMap<Track, Set<Cluster>>();
+	    for (Track tr : tracksSortedByMomentum) {
+		Set<Cluster> vetoedAdditionsDueToEoverP = new HashSet<Cluster>();
+		Set<Cluster> vetoedAdditionsDueToTrackSeed = new HashSet<Cluster>();
+		Set<Cluster> vetoedTrackSeeds = new HashSet<Cluster>();
+		Set<Cluster> showerComponents = iterateOnOneTrack(tr, tweakedTracksMatchedToClusters, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditionsDueToEoverP, vetoedAdditionsDueToTrackSeed, vetoedTrackSeeds);
 		newMapTrackToShowerComponents.put(tr, showerComponents);
 		for (Cluster clus : showerComponents) {
 		    newMapShowerComponentToTrack.put(clus, tr);
 		}
-		newMapTrackToVetoedAdditions.put(tr, vetoedAdditions);
+		newMapTrackToVetoedAdditions.put(tr, vetoedAdditionsDueToEoverP);
+		mapTrackToVetoedTrackSeeds.put(tr, vetoedTrackSeeds);
+		mapTrackToVetoedAdditionsDueToTrackSeed.put(tr, vetoedAdditionsDueToTrackSeed);
+	    }
+	    for (Track tr : tracksSortedByMomentum) {
+		Set<Cluster> vetoedAdditionsDueToTrackSeed = mapTrackToVetoedAdditionsDueToTrackSeed.get(tr);
+		Set<Cluster> vetoedTrackSeeds = mapTrackToVetoedTrackSeeds.get(tr);
+		if (vetoedTrackSeeds.size()>0) {
+		    if (m_debug) {
+			System.out.println("Watch out! Some of these might also have been vetoed for E/p");
+			System.out.println("DEBUG: For track of p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+", I vetoed "+vetoedTrackSeeds.size()+" seeds since they connected to the seed of another track:");
+			for (Cluster clus : vetoedTrackSeeds) {
+			    String printme = new String("DEBUG:     cluster with "+clus.getCalorimeterHits().size());
+			    Track vetoedTrack = newMapShowerComponentToTrack.get(clus);
+			    if (vetoedTrack != null) {
+				printme += " from track with p="+(new BasicHep3Vector(vetoedTrack.getMomentum())).magnitude();
+			    } else {
+				printme += " [null track]";
+			    }
+			    System.out.println(printme);
+			}
+			System.out.println("DEBUG: For track of p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+", these corresponded to "+vetoedAdditionsDueToTrackSeed.size()+" actual clusters:");
+			for (Cluster clus : vetoedAdditionsDueToTrackSeed) {
+			    String printme = new String("DEBUG:     cluster with "+clus.getCalorimeterHits().size());
+			    Track vetoedTrack = newMapShowerComponentToTrack.get(clus);
+			    if (vetoedTrack != null) {
+				printme += " connected to track with p="+(new BasicHep3Vector(vetoedTrack.getMomentum())).magnitude();
+			    } else {
+				printme += " [null track]";
+			    }
+			    MCParticle domPartOfClus = quoteDominantParticle(clus);
+			    int domPDG = domPartOfClus.getPDGID();
+			    double domMom = domPartOfClus.getMomentum().magnitude();
+			    printme += ". Dominant particle of this cluster is "+domPDG+" with p="+domMom;
+			    System.out.println(printme);
+			}
+		    }
+		}
 	    }
 
 	    // Look for tracks which
@@ -723,4 +783,34 @@
 	     return outputMap;
 	}
     }
+
+    protected void debugPrintTrackInfo(List<Track> trackList, List<Track> unmatchedTracks, Map<Track,Cluster> tracksMatchedToClusters, Set<Track> uniquelyMatchedTracks, Set<Track> ambiguouslyMatchedTracks, List<Track> tweakedTracks, Set<Cluster> seeds) {
+	System.out.println("There were "+trackList.size()+" tracks in the event. Of these, "+unmatchedTracks.size()+" were unmatched and "+tracksMatchedToClusters.size()+" were matched. Of the track matches, "+uniquelyMatchedTracks.size()+" were unique and "+ambiguouslyMatchedTracks.size()+" were ambiguous. After tweaking, there were "+tweakedTracks.size()+" tracks. The event contains "+seeds.size()+" seeds.");
+	System.out.println("Here are the "+unmatchedTracks.size()+" unmatched tracks:");
+	for (Track tr : unmatchedTracks) {
+	    System.out.println(" * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
+	}
+	System.out.println("Here are the "+tracksMatchedToClusters.size()+" matched tracks:");
+	for (Track tr : tracksMatchedToClusters.keySet()) {
+	    Cluster seed = tracksMatchedToClusters.get(tr);
+	    String printme = new String(" * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" matched to a seed with "+(seed.getCalorimeterHits().size())+" hits");
+	    if (uniquelyMatchedTracks.contains(tr)) {
+		printme += " (unique)";
+	    }
+	    if (ambiguouslyMatchedTracks.contains(tr)) {
+		printme += " (ambiguous)";
+	    }
+	    if (uniquelyMatchedTracks.contains(tr) && ambiguouslyMatchedTracks.contains(tr)) {
+		throw new AssertionError("Book-keeping error");
+	    } else if (!uniquelyMatchedTracks.contains(tr) && !ambiguouslyMatchedTracks.contains(tr)) {
+		throw new AssertionError("Book-keeping error");
+	    }
+	    System.out.println(printme);
+	}
+	System.out.println("Here are the "+tracksSortedByMomentum.size()+" tweaked tracks, sorted by momentum:");
+	for (Track tr : tracksSortedByMomentum) {
+	    Cluster seed = tweakedTracksMatchedToClusters.get(tr);
+	    System.out.println(" * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" matched to a seed with "+(seed.getCalorimeterHits().size())+" hits");
+	}
+    }
 }
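
One more illustration of the ReclusterDTreeDriver change: all track seeds are now registered in newMapShowerComponentToTrack before any shower is grown, so that the unavailable-cluster check during propagation can already see every other track's seed on the first pass. A toy version of that ordering, again with plain strings standing in for tracks and clusters and a trivial growShower() in place of iterateOnOneTrack(), looks like this:

import java.util.*;

// Toy illustration: register every seed before growing any shower.
class SeedFirstSketch {
    // Stand-in for iterateOnOneTrack(): here the grown shower is just the seed itself.
    static Set<String> growShower(String seed, Set<String> unavailable) {
        return new HashSet<String>(Collections.singletonList(seed));
    }

    public static void main(String[] args) {
        // Hypothetical inputs: each track's matched seed cluster, in momentum order.
        Map<String, String> seedOfTrack = new LinkedHashMap<String, String>();
        seedOfTrack.put("trackA", "seedA");
        seedOfTrack.put("trackB", "seedB");

        // Pass 1: register the seeds up front, so that while growing trackA's shower
        // the propagation already knows that seedB belongs to trackB.
        Map<String, String> ownerOfCluster = new HashMap<String, String>();
        for (Map.Entry<String, String> entry : seedOfTrack.entrySet()) {
            ownerOfCluster.put(entry.getValue(), entry.getKey());
        }

        // Pass 2: grow each shower; anything already owned is off limits,
        // except the track's own seed.
        for (Map.Entry<String, String> entry : seedOfTrack.entrySet()) {
            String track = entry.getKey();
            Set<String> unavailable = new HashSet<String>(ownerOfCluster.keySet());
            unavailable.remove(entry.getValue());
            for (String clus : growShower(entry.getValue(), unavailable)) {
                ownerOfCluster.put(clus, track);
            }
        }
        System.out.println(ownerOfCluster); // {seedA=trackA, seedB=trackB}
    }
}
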
CVSspam 0.2.8