Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDriver.java +895 -23 1.3 -> 1.4
MJC: In progress...

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.3 -> 1.4
diff -u -r1.3 -r1.4
--- ReclusterDriver.java	16 Oct 2007 01:40:11 -0000	1.3
+++ ReclusterDriver.java	10 Nov 2007 02:10:11 -0000	1.4
@@ -19,6 +19,8 @@
 
 public class ReclusterDriver extends Driver {
 
+    boolean m_megaDebug = false;
+    boolean m_debug = true;
     String m_outputParticleListName = "ReclusteredParticles";
     String m_mcList;
     String m_inputSmallClusters;
@@ -118,7 +120,7 @@
 	    System.out.println("DEBUG: ReclusterDriver read in "+smallClusters.size()+" small clusters comprising "+countSmallClusterHits+" hits.");
 	}
 	
-	// Many of those LargeClusters contain skeleton(s), but a few may not. Identify them.
+	// Many of those LargeClusters [and some of the small clusters] contain skeleton(s), but a few may not. Identify them.
 	List<Cluster> largeClustersWithoutSkeletons = new Vector<Cluster>();
 	for (Cluster largeCluster : largeClusters) {
 	    List<CalorimeterHit> largeClusterHits = largeCluster.getCalorimeterHits();
@@ -136,6 +138,32 @@
 		largeClustersWithoutSkeletons.add(largeCluster);
 	    }
 	}
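+	// Mirror the scan above for the small clusters: flag any small cluster that
+	// contains at least one hit from a MIP or clump skeleton.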
+	List<Cluster> smallClustersWithoutSkeletons = new Vector<Cluster>();
+	for (Cluster smallCluster : smallClusters) {
+	    List<CalorimeterHit> smallClusterHits = smallCluster.getCalorimeterHits();
+	    boolean smallClusterContainsSkeleton = false;
+	    for (Cluster mip : mips) {
+		if (smallClusterContainsSkeleton) { break; }
+		for (CalorimeterHit hit : mip.getCalorimeterHits()) {
+		    if (smallClusterHits.contains(hit)) {
+			smallClusterContainsSkeleton = true;
+			break;
+		    }
+		}
+	    }
+	    for (Cluster clump : clumps) {
+		if (smallClusterContainsSkeleton) { break; }
+		for (CalorimeterHit hit : clump.getCalorimeterHits()) {
+		    if (smallClusterHits.contains(hit)) {
+			smallClusterContainsSkeleton = true;
+			break;
+		    }
+		}
+	    }
+	    if (!smallClusterContainsSkeleton) {
+		smallClustersWithoutSkeletons.add(smallCluster);
+	    }
+	}
 
 	{
 	    int countLargeHitsUnused = 0;
@@ -171,15 +199,24 @@
 	mixedClusters.addAll(clumps);
 	mixedClusters.addAll(largeClustersWithoutSkeletons);
 	mixedClusters.addAll(photonClusters); // could match [electrons]
-	mixedClusters.addAll(smallClusters);
+	List<Cluster> mixedClustersAndHits = new Vector<Cluster>(mixedClusters);
+	mixedClustersAndHits.addAll(smallClustersWithoutSkeletons);
 	Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
 	Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
 	for (Track tr : trackList) {
+	    // First try excluding the small clusters, which are often just single hits:
 	    Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, mixedClusters);
+	    if (matchedCluster == null) {
+		// Now retry with all:
+		matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, mixedClustersAndHits);
+	    }
 	    if (matchedCluster != null) {
 		tracksMatchedToClusters.put(tr, matchedCluster);
 		List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
-		if (clusTrList == null) { clusTrList = new Vector<Track>(); clustersMatchedToTracks.put(matchedCluster, clusTrList); }
+		if (clusTrList == null) { 
+		    clusTrList = new Vector<Track>(); 
+		    clustersMatchedToTracks.put(matchedCluster, clusTrList); 
+		}
 		clusTrList.add(tr);
 	    }
 	}
@@ -191,7 +228,31 @@
 	unmatchedTracks.removeAll(tracksMatchedToClusters.keySet());
 
 	// Some debug printout
-	System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining.");
+	System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
+	for (Track tr : unmatchedTracks) {
+	    Hep3Vector p = new BasicHep3Vector(tr.getMomentum());
+	    double px = p.x();
+	    double py = p.y();
+	    double pz = p.z();
+	    double pmag = p.magnitude();
+	    double pt = Math.sqrt(px*px + py*py);
+	    double cosTheta = pz/pmag;
+	    String printme = "Track with p="+pmag+" (pt="+pt+", cosTheta="+cosTheta+")";
+	    LocalHelixExtrapolationTrackClusterMatcher extrap = new LocalHelixExtrapolationTrackClusterMatcher();
+	    extrap.process(m_event);
+	    Hep3Vector point = extrap.performExtrapolation(tr);
+	    if (point == null) {
+		printme += " -- extrapolation point is null";
+	    } else {
+		double x = point.x();
+		double y = point.y();
+		double z = point.z();
+		double r = Math.sqrt(x*x + y*y);
+		printme += " -- extrapolation point at x="+x+", y="+y+", z="+z+", r="+r;
+	    }
+	    System.out.println(printme);
+	}
+
 	System.out.println("DEBUG: Here are the "+tracksMatchedToClusters.keySet().size()+" matched tracks");
 	for (Track tr : tracksMatchedToClusters.keySet()) {
 	    Cluster clus = tracksMatchedToClusters.get(tr);
@@ -218,7 +279,7 @@
 	    if (clumps.contains(clus)) { printme += " [from CLUMPS]"; }
 	    if (largeClustersWithoutSkeletons.contains(clus)) { printme += " [from LARGE CLUSTERS W/O SKELETONS]"; }
 	    if (photonClusters.contains(clus)) { printme += " [from PHOTONS]"; }
-	    if (smallClusters.contains(clus)) { printme += " [from SMALL CLUSTERS]"; }
+	    if (smallClustersWithoutSkeletons.contains(clus)) { printme += " [from SMALL CLUSTERS]"; }
 	    Map<MCParticle, List<CalorimeterHit>> clusterTruth = new HashMap<MCParticle, List<CalorimeterHit>>();
 	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
 		SimCalorimeterHit simHit = (SimCalorimeterHit)(hit);
@@ -237,6 +298,22 @@
 		}
 	    }
 	    printme += " (dominant particle: "+maxParticle.getType().getPDGID()+" with "+clusterTruth.get(maxParticle).size()+" hits)";
+
+	    // DEBUG
+	    LocalHelixExtrapolationTrackClusterMatcher extrap = new LocalHelixExtrapolationTrackClusterMatcher();
+	    extrap.process(m_event);
+	    Hep3Vector point = extrap.performExtrapolation(tr);
+	    printme += " -- extrap point is ";
+	    if (point == null) {
+		printme += "null";
+	    } else {
+		double x = point.x();
+		double y = point.y();
+		double z = point.z();
+		double r = Math.sqrt(x*x + y*y);
+		printme += "r="+r+", z="+z;
+	    }
+
 	    System.out.println(printme);
 	}
 
@@ -256,6 +333,211 @@
 	    seedClusterTrackEnergy.put(seed, new Double(energySum));
 	}
 
+	// Sort matched tracks by momentum
+	List<Track> tracksSortedByMomentum = new Vector<Track>();
+	for (Track tr : tracksMatchedToClusters.keySet()) {
+	    tracksSortedByMomentum.add(tr);
+	}
+	Collections.sort(tracksSortedByMomentum, new MomentumSort());
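+	// Note: MomentumSort (defined below) orders ascending, so lower-momentum
+	// tracks are processed first.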
+
+	// Sanity check: Total cluster energy in event
+	{
+	    Set<Cluster> allClustersInEvent = new HashSet<Cluster>();
+	    Set<CalorimeterHit> allHitsInEvent = new HashSet<CalorimeterHit>();
+	    for (ReconstructedParticle p : muonParticles) { allClustersInEvent.addAll(p.getClusters()); }
+	    allClustersInEvent.addAll(photonClusters);
+	    allClustersInEvent.addAll(skeletonClusters);
+	    allClustersInEvent.addAll(smallClustersWithoutSkeletons);
+	    allClustersInEvent.addAll(largeClusters);
+	    allClustersInEvent.addAll(mips);
+	    allClustersInEvent.addAll(clumps);
+	    allClustersInEvent.addAll(splitSkeletonClusters);
+	    for (Cluster c : allClustersInEvent) { allHitsInEvent.addAll(c.getCalorimeterHits()); }
+	    allHitsInEvent.addAll(unusedHits.values());
+	    BasicCluster tmpClus = new BasicCluster();
+	    for (CalorimeterHit h : allHitsInEvent) { tmpClus.addHit(h); }
+	    System.out.println("DEBUG: Total calorimeter energy in event = "+energy(tmpClus));
+	}
+
+	////////////////////////////////////////////////////////////////////////
+	List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
+	// We can't share seeds... on occasion, a small cluster will be a seed.
+	List<Cluster> smallClusterSeeds = new Vector<Cluster>();
+	for (Cluster seed : seeds) {
+	    if (smallClustersWithoutSkeletons.contains(seed)) {
+		smallClusterSeeds.add(seed);
+	    }
+	}
+	List<Cluster> smallClustersExcludingSeedsEtc = new Vector<Cluster>(smallClustersWithoutSkeletons);
+	smallClustersExcludingSeedsEtc.removeAll(smallClusterSeeds);
+	// Things that are linkable...
+	Set<Cluster> linkableClusters = new HashSet<Cluster>();
+	linkableClusters.addAll(smallClusterSeeds);
+	linkableClusters.addAll(mips);
+	linkableClusters.addAll(clumps);
+	linkableClusters.addAll(photonClusters);
+	linkableClusters.addAll(largeClustersWithoutSkeletons);
+	// Share halo
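+	// Workflow: wrap the objects to be shared, let the proximity algorithm assign
+	// per-target weights via createShares(), then rebuildHints() before lookups.
+	// The two constructor arguments are evidently distance scales -- tighter for
+	// halo hits here than for the small clusters shared below.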
+	ProximityClusterSharingAlgorithm proximityAlgForHalo = new ProximityClusterSharingAlgorithm(20.0, 100.0);
+	SharedClusterGroup sharedHaloHits = new SharedClusterGroup(wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons, proximityAlgForHalo);
+	sharedHaloHits.createShares(smallClusterSeeds);
+	sharedHaloHits.createShares(mips);
+	sharedHaloHits.createShares(clumps);
+	sharedHaloHits.createShares(photonClusters);
+	sharedHaloHits.createShares(largeClustersWithoutSkeletons);
+	sharedHaloHits.rebuildHints();	
+	allSharedClusters.add(sharedHaloHits);
+	System.out.println("Checking for halo sharing... "+wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons.size()+" hits -> "+sharedHaloHits.listAllSharedClusters().size()+" SharedCluster objects");	
+	debugPrint(smallClusterSeeds, sharedHaloHits, "Small", "halo");
+	debugPrint(mips, sharedHaloHits, "MIP", "halo");
+	debugPrint(clumps, sharedHaloHits, "Clump", "halo");
+	debugPrint(photonClusters, sharedHaloHits, "Photon", "halo");
+	debugPrint(largeClustersWithoutSkeletons, sharedHaloHits, "Large", "halo");
+	// Share small clusters
+	ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
+	SharedClusterGroup sharedSmallClusters = new SharedClusterGroup(smallClustersExcludingSeedsEtc, proximityAlgForSmallClusters);
+	sharedSmallClusters.createShares(smallClusterSeeds);
+	sharedSmallClusters.createShares(mips);
+	sharedSmallClusters.createShares(clumps);
+	sharedSmallClusters.createShares(photonClusters);
+	sharedSmallClusters.createShares(largeClustersWithoutSkeletons);
+	sharedSmallClusters.rebuildHints();
+	allSharedClusters.add(sharedSmallClusters);
+	System.out.println("Checking for small cluster sharing... "+smallClustersExcludingSeedsEtc.size()+" clusters -> "+sharedSmallClusters.listAllSharedClusters().size()+" SharedCluster objects");
+	debugPrint(smallClusterSeeds, sharedSmallClusters, "Small", "small cluster");
+	debugPrint(mips, sharedSmallClusters, "MIP", "small cluster");
+	debugPrint(clumps, sharedSmallClusters, "Clump", "small cluster");
+	debugPrint(photonClusters, sharedSmallClusters, "Photon", "small cluster");
+	debugPrint(largeClustersWithoutSkeletons, sharedSmallClusters, "Large", "small cluster");
+	// Cache potential links (warning, there may be a lot!)
+	List<ScoredLink> potentialLinks = new Vector<ScoredLink>();
+	// First look for mip-clump links, only working within a large, contiguous cluster:
+	double scaleFactorTrackToTrack = 1.0;
+	double scaleFactorTrackToClump = 1.0;
+	System.out.println("DEBUG: There are "+largeClusters.size()+" large clusters in the event which can contain skeletons.");
+	for (Cluster largeClus : largeClusters) {
+	    List<Cluster> subClustersMips = new Vector<Cluster>();
+	    List<Cluster> subClustersClumps = new Vector<Cluster>();
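+	    // Collect MIPs/clumps two levels down; this assumes the hierarchy is exactly
+	    // large cluster -> skeleton -> MIP/clump (recursivelyFindSubClusters below
+	    // would be needed for arbitrary depth).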
+	    for (Cluster subClus : largeClus.getClusters()) {
+		for (Cluster subsubClus : subClus.getClusters()) {
+		    if (mips.contains(subsubClus)) { subClustersMips.add(subsubClus); }
+		    if (clumps.contains(subsubClus)) { subClustersClumps.add(subsubClus); }
+		}		
+	    }
+	    System.out.println("DEBUG: Large cluster with "+largeClus.getCalorimeterHits().size()+" hits contains "+subClustersMips.size()+" MIPs and "+subClustersClumps.size()+" clumps.");
+	    // Potential mip-mip links [careful -- don't double-count]
+	    for (int i=0; i<subClustersMips.size(); i++) {
+		Cluster mip1 = subClustersMips.get(i);
+		for (int j=i+1; j<subClustersMips.size(); j++) {
+		    Cluster mip2 = subClustersMips.get(j);
+		    double likelihood = m_eval.getLinkLikelihoodTrackToTrack(mip1,mip2);
+		    double score = likelihood / scaleFactorTrackToTrack;
+		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
+		    potentialLinks.add(new ScoredLink(mip1, mip2, score));
+		    System.out.println("DEBUG:   MIP["+mip1.getCalorimeterHits().size()+"] -- MIP["+mip2.getCalorimeterHits().size()+"]  = "+score);
+		}
+	    }
+	    // Potential mip-clump links
+	    for (Cluster mip : subClustersMips) {
+		for (Cluster clump : subClustersClumps) {
+		    double likelihood = m_eval.getLinkLikelihoodTrackToClump(mip,clump);
+		    double score = likelihood / scaleFactorTrackToClump;
+		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
+		    potentialLinks.add(new ScoredLink(mip, clump, score));
+		    System.out.println("DEBUG:   MIP["+mip.getCalorimeterHits().size()+"] -- Clump["+clump.getCalorimeterHits().size()+"]  = "+score);
+		}
+	    }
+	}
+	// Potential links to other things...
+	//
+	// ...
+	//
+	// [other legitimate link types]
+	// [...]
+	// OK. Now sort the potential links by score, highest first:
+	Collections.sort(potentialLinks, Collections.reverseOrder(new ScoredLinkSort()));
+	System.out.println("Rerecluster: There are "+potentialLinks.size()+" potential links:");
+	//for (ScoredLink link : potentialLinks) {
+	//    System.out.println("  Link with score="+link.score());
+	//}
+	// Try to build skeletons, working with tracks in momentum-sorted order...
+	Map<Track,Double> newMapTrackToThreshold = new HashMap<Track,Double>(); // for geometrical checks
+	Map<Track,Double> newMapTrackToTolerance = new HashMap<Track,Double>(); // for E/p checks
+	for (Track tr : tracksSortedByMomentum) {
+	    newMapTrackToThreshold.put(tr, new Double(0.7));
+	    newMapTrackToTolerance.put(tr, new Double(1.0));
+	}
+	Map<Cluster, Track> newMapShowerComponentToTrack = null;
+	Map<Track, Set<Cluster>> newMapTrackToShowerComponents = null;
+	Map<Track, Set<Cluster>> newMapTrackToVetoedAdditions = null;
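+	// Five fixed adaptation passes: after each pass, loosen the E/p tolerance of
+	// tracks whose vetoed clusters ended up unused, and lower the geometric
+	// threshold of tracks whose shower energy sits well below the track momentum.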
+	for (int iIter=0; iIter<5; iIter++) {
+	    newMapShowerComponentToTrack = new HashMap<Cluster, Track>();
+	    newMapTrackToShowerComponents = new HashMap<Track, Set<Cluster>>();
+	    newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
+	    for (Track tr : tracksSortedByMomentum) {
+		Set<Cluster> vetoedAdditions = new HashSet<Cluster>();
+		Set<Cluster> showerComponents = iterateOnOneTrack(tr, tracksMatchedToClusters, potentialLinks, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditions);
+		newMapTrackToShowerComponents.put(tr, showerComponents);
+		for (Cluster clus : showerComponents) {
+		    newMapShowerComponentToTrack.put(clus, tr);
+		}
+		newMapTrackToVetoedAdditions.put(tr, vetoedAdditions);
+	    }
+	    // Look for tracks which
+	    //   * Have E/p significantly below 1, and/or
+	    //   * Could be linked to a cluster but were vetoed, PROVIDED THAT the cluster was not put in any other shower
+	    Set<Track> tracksWithEoverPTooLow = new HashSet<Track>();
+	    Set<Track> tracksWithVetoedLinkToUnusedCluster = new HashSet<Track>();
+
+	    printStatus("SUMMARY FOR THIS ITERATION:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, photonClusters, mips, clumps, largeClustersWithoutSkeletons, newMapTrackToVetoedAdditions);
+
+	    for (Track tr : tracksSortedByMomentum) {
+		Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+		double clusterEnergy = energy(showerComponents, allSharedClusters);
+		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		double sigma = 0.7 * Math.sqrt(trackMomentum);
+		double normalizedResidual = (clusterEnergy-trackMomentum)/sigma;
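+		// e.g. for a 10 GeV track, sigma = 0.7*sqrt(10) ~ 2.2 GeV, so the shower
+		// is flagged once its energy falls below roughly 7.8 GeV (residual < -1).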
+		if (normalizedResidual < -1.0) {
+		    // FIXME: This cut-off shouldn't be hard-coded
+		    tracksWithEoverPTooLow.add(tr);
+		}
+	    }
+	    for (Cluster clus : linkableClusters) {
+		if (newMapShowerComponentToTrack.get(clus) == null) {
+		    for (Track trForVeto : tracksSortedByMomentum) {
+			if (newMapTrackToVetoedAdditions.get(trForVeto).contains(clus)) {
+			    tracksWithVetoedLinkToUnusedCluster.add(trForVeto);
+			}
+		    }
+		}
+	    }
+	    for (Track tr : tracksWithVetoedLinkToUnusedCluster) {
+		Double oldTolerance = newMapTrackToTolerance.get(tr);
+		double newTolerance = oldTolerance.doubleValue() + 0.25;
+		newMapTrackToTolerance.put(tr, newTolerance);
+	    }
+	    for (Track tr : tracksWithEoverPTooLow) {
+		Double oldThreshold = newMapTrackToThreshold.get(tr);
+		double newThreshold = oldThreshold.doubleValue() - 0.05;
+		newMapTrackToThreshold.put(tr, new Double(newThreshold));
+	    }
+	    // NOW: Go back and recluster with lower/higher threshold if below/above target energy.
+	}
+
+	printStatus("FINAL STATUS:",
+		    tracksSortedByMomentum,
+		    allSharedClusters,
+		    newMapTrackToShowerComponents,
+		    newMapShowerComponentToTrack,
+		    newMapTrackToThreshold,
+		    newMapTrackToTolerance,
+		    photonClusters, mips, clumps, largeClustersWithoutSkeletons,
+		    newMapTrackToVetoedAdditions
+		    );
+	////////////////////////////////////////////////////////////////////////
+
 	// Set up initial thresholds
 	Map<Cluster,Double> mapSeedsToThresholds = new HashMap<Cluster,Double>();
 	for (Cluster seed : seeds) {
@@ -318,7 +600,8 @@
 
 
 
-	printStatus("Initial state:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
+	//printStatus("Initial state:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
+	writeClusters("recluster00", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
 
 	// First pass: assign unambiguous split skeletons
 	for (Cluster skel : splitSkeletonClusters) {
@@ -343,7 +626,8 @@
 	    }
 	}
 
-	printStatus("After first pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
+	//printStatus("After first pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
+	writeClusters("recluster01", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
 
 	// Second pass: attach small clusters and hits to nearby seeds OR skeleton components OR large clusters OR photon clusters
 	// Create maps from small thing to big thing
@@ -407,7 +691,8 @@
 	    }
 	}
 
-	printStatus("After second pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);	
+	//printStatus("After second pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);	
+	writeClusters("recluster02", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);	
 
 	// SCORING?
 	//
@@ -442,34 +727,57 @@
 	// would go out of bounds) then we should clearly not add it.
 
 	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	printStatus("After third pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	buildNeutrals(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy, mapAttachSmallClusters, mapAttachHits); // TEST
+	//printStatus("After third pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster03", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+
+	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
+	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
+	//printStatus("After fourth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster04", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+
+	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
+	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
+	//printStatus("After fifth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster05", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 
 	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
 	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	printStatus("After fourth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	//printStatus("After sixth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster06", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 
 	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
 	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	printStatus("After fifth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	//printStatus("After seventh pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster07", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 
 	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
 	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	printStatus("After sixth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	//printStatus("After eigth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster08", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 
 	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
 	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	printStatus("After seventh pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	//printStatus("After ninth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster09", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+
+	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
+	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
+	//printStatus("After tenth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("recluster10", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 
 	// Last step needs to be to stuff in extra hits/clusters if they'll reasonably fit into
 	// a nearby charged cluster, heedless of thresholds.
 
 	fillIn(seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds);
-	printStatus("Final state:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	//printStatus("Final charged state:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+	writeClusters("reclusterFinalCharged", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 
 	// ... now flesh out neutral hadrons (needs to be done)
 
 	// ... now write out!
 	writeOut(seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks);
+	writeClusters("reclusterFinalAll", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 	
 
 	System.out.println("RECLUSTER: ALL DONE");
@@ -604,6 +912,8 @@
 			double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
 			if (clusterEnergy - trackEnergy > 2.0*trackEnergySigma) {
 			    // BZZT -- reject this one as silly
+			    // We should really have rejected this earlier... dangerous and
+			    // inconsistent to skip a direct link but accept indirect links via it.
 			    //System.out.println("DEBUG: Rejected a match from a cluster with "+clus.getCalorimeterHits().size()+" hits to seed with "+seed.getCalorimeterHits().size()+" with score = "+seedScore+" / "+seedThreshold+" because combined energy would be "+clusterEnergy+" but track energy is only "+trackEnergy);
 			} else {
 			    // Sensible-ish...
@@ -651,7 +961,7 @@
 		if (score < 0.0) { score = 0.0; }
 		potentialTrackToTrackLinks.put(newLink, new Double(score));
 		potentialLinks.put(newLink, new Double(score));
-		System.out.println("DEBUG: Init link from MIP "+i+"/"+mips.size()+" ("+clus1.getCalorimeterHits().size()+" hits) to MIP "+j+"/"+mips.size()+" ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToTrack+" = "+score+"... proximity="+proximity(clus1,clus2));
+		//System.out.println("DEBUG: Init link from MIP "+i+"/"+mips.size()+" ("+clus1.getCalorimeterHits().size()+" hits) to MIP "+j+"/"+mips.size()+" ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToTrack+" = "+score+"... proximity="+proximity(clus1,clus2));
 	    }
 	    // Next: Track-Clump links
 	    for (int j=0; j<clumps.size(); j++) {
@@ -667,7 +977,7 @@
 		if (score < 0.0) { score = 0.0; }
 		potentialTrackToClumpLinks.put(newLink, new Double(score));
 		potentialLinks.put(newLink, new Double(score));
-		System.out.println("DEBUG: Init link from MIP "+i+"/"+mips.size()+" ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP "+j+"/"+clumps.size()+" ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToClump+" = "+score+"... proximity="+proximity(clus1,clus2));
+		//System.out.println("DEBUG: Init link from MIP "+i+"/"+mips.size()+" ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP "+j+"/"+clumps.size()+" ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToClump+" = "+score+"... proximity="+proximity(clus1,clus2));
 	    }
 	}
 	// Finally: Clump-Clump links, again being careful of double-counting
@@ -776,7 +1086,9 @@
 		Double score = scoreForEachDestination.get(destination);
 		if (score == null) { throw new AssertionError("Null score"); }
 		scoreMapForThisDestination.put(seed, score);
-		//System.out.println("DEBUG: Added a likelihood link from a component with "+destination.getCalorimeterHits().size()+" hits to seed with "+seed.getCalorimeterHits().size()+" with a score of "+score);
+		if (m_debug) {
+		    //System.out.println("DEBUG: Added a likelihood link from a component with "+destination.getCalorimeterHits().size()+" hits to seed with "+seed.getCalorimeterHits().size()+" with a score of "+score);
+		}
 	    }
 	}
 	
@@ -877,7 +1189,9 @@
 		    Double previousScore = scoresForThisCluster.get(seedOfTarget);
 		    if (previousScore == null || previousScore < combinedScore) {
 			scoresForThisCluster.put(seedOfTarget, new Double(combinedScore));
-			//System.out.println("Added a proximity+angle link from a cluster with "+clus.getCalorimeterHits().size()+" to a seed with "+seedOfTarget.getCalorimeterHits().size()+" via a proxy with "+target.getCalorimeterHits().size()+" with score = "+proximityScore+" * "+likelihoodScore+" * "+dotProductScore+" = "+combinedScore);
+			if (m_megaDebug) {
+			    System.out.println("Added a proximity+angle link from a cluster with "+clus.getCalorimeterHits().size()+" to a seed with "+seedOfTarget.getCalorimeterHits().size()+" via a proxy with "+target.getCalorimeterHits().size()+" with score = "+proximityScore+" * "+likelihoodScore+" * "+dotProductScore+" = "+combinedScore);
+			}
 		    }
 		}
 	    }
@@ -924,6 +1238,9 @@
 			Double previousScore = scoresForThisCluster.get(seedOfTarget);
 			if (previousScore == null || previousScore < score) {
 			    scoresForThisCluster.put(seedOfTarget, new Double(score));
+			    if (m_megaDebug) {
+				System.out.println("Added a proximity link from a cluster with "+clus.getCalorimeterHits().size()+" to a seed with "+seedOfTarget.getCalorimeterHits().size()+" via a proxy with "+target.getCalorimeterHits().size()+" with score = "+score+" [dist="+dist+"]");
+			    }
 			}
 		    }
 		}
@@ -933,6 +1250,102 @@
 	return scoreMap;
     }
 
+    protected void printStatus(String desc, 
+			       List<Track> tracksSortedByMomentum,
+			       List<SharedClusterGroup> allSharedClusters,
+			       Map<Track, Set<Cluster>> newMapTrackToShowerComponents,
+			       Map<Cluster, Track> newMapShowerComponentToTrack,
+			       Map<Track,Double> newMapTrackToThreshold,
+			       Map<Track,Double> newMapTrackToTolerance,
+			       List<Cluster> photonClusters,
+			       List<Cluster> mips,
+			       List<Cluster> clumps,
+			       List<Cluster> largeClustersWithoutSkeletons,
+			       Map<Track, Set<Cluster>> newMapTrackToVetoedAdditions
+			       )
+    {
+	System.out.println(desc);
+	for (Track tr : tracksSortedByMomentum) {
+		Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+		double clusterEnergy = energy(showerComponents, allSharedClusters);
+		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		double sigma = 0.7 * Math.sqrt(trackMomentum);
+		double normalizedResidual = (clusterEnergy-trackMomentum)/sigma;
+		List<Track> tmpTrackList = new Vector<Track>();
+		tmpTrackList.add(tr);
+		double effic = quoteEfficiency(tmpTrackList, showerComponents);
+		double purity = quotePurity(tmpTrackList, showerComponents);
+		System.out.println("  Track: threshold="+newMapTrackToThreshold.get(tr)
+				   +", tolerance="+newMapTrackToTolerance.get(tr)
+				   +", E/p is "+clusterEnergy+" / "+trackMomentum+" => NormResid = "+normalizedResidual
+				   +". Effic="+effic+", purity="+purity);
+	}
+	for (Cluster clus : photonClusters) {
+	    MCParticle dominantParticle = quoteDominantParticle(clus);
+	    String dominantInfo = "Dominant particle is "+dominantParticle.getPDGID()+" with p="+dominantParticle.getMomentum().magnitude();
+	    Track tr = newMapShowerComponentToTrack.get(clus);
+	    if (tr != null) {
+		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		System.out.println("  Cluster: Photon with "+clus.getCalorimeterHits().size()+" matched to track with p="+trackMomentum+". "+dominantInfo);
+	    } else {
+		System.out.println("  Cluster: Photon with "+clus.getCalorimeterHits().size()+" not matched to a track. "+dominantInfo);
+		for (Track trForVeto : tracksSortedByMomentum) {
+		    if (newMapTrackToVetoedAdditions.get(trForVeto).contains(clus)) {
+			System.out.println("           [vetoed for track with p="+((new BasicHep3Vector(trForVeto.getMomentum())).magnitude())+" for E/p]");
+		    }
+		}
+	    }
+	}
+	for (Cluster clus : mips) {
+	    MCParticle dominantParticle = quoteDominantParticle(clus);
+	    String dominantInfo = "Dominant particle is "+dominantParticle.getPDGID()+" with p="+dominantParticle.getMomentum().magnitude();
+	    Track tr = newMapShowerComponentToTrack.get(clus);
+	    if (tr != null) {
+		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		System.out.println("  Cluster: MIP with "+clus.getCalorimeterHits().size()+" matched to track with p="+trackMomentum+". "+dominantInfo);
+	    } else {
+		System.out.println("  Cluster: MIP with "+clus.getCalorimeterHits().size()+" not matched to a track. "+dominantInfo);
+		for (Track trForVeto : tracksSortedByMomentum) {
+		    if (newMapTrackToVetoedAdditions.get(trForVeto).contains(clus)) {
+			System.out.println("           [vetoed for track with p="+((new BasicHep3Vector(trForVeto.getMomentum())).magnitude())+" for E/p]");
+		    }
+		}
+	    }
+	}
+	for (Cluster clus : clumps) {
+	    MCParticle dominantParticle = quoteDominantParticle(clus);
+	    String dominantInfo = "Dominant particle is "+dominantParticle.getPDGID()+" with p="+dominantParticle.getMomentum().magnitude();
+	    Track tr = newMapShowerComponentToTrack.get(clus);
+	    if (tr != null) {
+		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		System.out.println("  Cluster: Clump with "+clus.getCalorimeterHits().size()+" matched to track with p="+trackMomentum+". "+dominantInfo);
+	    } else {
+		System.out.println("  Cluster: Clump with "+clus.getCalorimeterHits().size()+" not matched to a track. "+dominantInfo);
+		for (Track trForVeto : tracksSortedByMomentum) {
+		    if (newMapTrackToVetoedAdditions.get(trForVeto).contains(clus)) {
+			System.out.println("           [vetoed for track with p="+((new BasicHep3Vector(trForVeto.getMomentum())).magnitude())+" for E/p]");
+		    }
+		}
+	    }
+	}
+	for (Cluster clus : largeClustersWithoutSkeletons) {
+	    MCParticle dominantParticle = quoteDominantParticle(clus);
+	    String dominantInfo = "Dominant particle is "+dominantParticle.getPDGID()+" with p="+dominantParticle.getMomentum().magnitude();
+	    Track tr = newMapShowerComponentToTrack.get(clus);
+	    if (tr != null) {
+		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		System.out.println("  Cluster: LargeClus with "+clus.getCalorimeterHits().size()+" matched to track with p="+trackMomentum+". "+dominantInfo);
+	    } else {
+		System.out.println("  Cluster: LargeClus with "+clus.getCalorimeterHits().size()+" not matched to a track. "+dominantInfo);
+		for (Track trForVeto : tracksSortedByMomentum) {
+		    if (newMapTrackToVetoedAdditions.get(trForVeto).contains(clus)) {
+			System.out.println("           [vetoed for track with p="+((new BasicHep3Vector(trForVeto.getMomentum())).magnitude())+" for E/p]");
+		    }
+		}
+	    }
+	}
+    }
+
     protected void printStatus(String desc, Collection<Cluster> seeds, Map<Cluster,Double> seedClusterTrackEnergy, Map<Cluster,Cluster> mapPhotonClustersToSeeds, Map<Cluster,Cluster> mapSmallClustersToSeeds, Map<Cluster,Cluster> mapLargeClustersToSeeds, Map<Cluster,Cluster> mapMipsToSeeds, Map<Cluster,Cluster> mapClumpsToSeeds, Map<Cluster,Cluster> mapHitsToSeeds, Map<Cluster,List<Track>> clustersMatchedToTracks, Map<Cluster,Double> mapSeedsToThresholds) {
 	System.out.println(desc);
 	for (Cluster seed : seeds) {
@@ -1056,20 +1469,39 @@
 	return output;
     }
 
+    protected boolean testEoverP(double clusterEnergy, Track tr, double tolerance) {
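+	// The track is treated as a charged pion (m = 0.140 GeV) and the energy
+	// resolution as a stochastic 0.7*sqrt(E) term, matching the E/p cuts above.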
+	double trackMassSq = 0.140 * 0.140;
+	double[] trackThreeMomentum = tr.getMomentum();
+	double trackMomentumSq = trackThreeMomentum[0]*trackThreeMomentum[0] + trackThreeMomentum[1]*trackThreeMomentum[1] + trackThreeMomentum[2]*trackThreeMomentum[2];
+	double trackEnergySq = trackMomentumSq + trackMassSq;
+	double trackEnergy = Math.sqrt(trackEnergySq);
+	double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
+	double normalisedResidual = (clusterEnergy - trackEnergy)/trackEnergySigma;
+	return (normalisedResidual < tolerance);
+    }
     protected double energy(Cluster clus) {
 	return m_calib.getEnergy(clus);
     }
-    protected double energy(List<Cluster> inputClusters) {
+    protected double energy(Collection<Cluster> inputClusters) {
 	Cluster magicCombinedCluster = makeCombinedCluster(inputClusters);
 	return energy(magicCombinedCluster);
     }
-    protected BasicCluster makeCombinedCluster(List<Cluster> inputClusters) {
+    protected BasicCluster makeCombinedCluster(Collection<Cluster> inputClusters) {
 	BasicCluster magicCombinedCluster = new BasicCluster();
 	for (Cluster clus : inputClusters) {
 	    magicCombinedCluster.addCluster(clus);
 	}
 	return magicCombinedCluster;
     }
+    protected Set<Cluster> recursivelyFindSubClusters(Cluster clus) {
+	Set<Cluster> output = new HashSet<Cluster>();
+	output.add(clus);
+	for (Cluster subClus : clus.getClusters()) {
+	    Set<Cluster> subClusterDaughters = recursivelyFindSubClusters(subClus);
+	    output.addAll(subClusterDaughters);
+	}
+	return output;
+    }
 
     protected double proximity(Cluster clus1, Cluster clus2) {
 	if (clus1.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
@@ -1090,11 +1522,11 @@
 	return minDist;
     }
 
-    protected double quoteEfficiency(List<Track> trackList, List<Cluster> clusterList) {
+    protected double quoteEfficiency(List<Track> trackList, Collection<Cluster> clusterList) {
 	Cluster combined = makeCombinedCluster(clusterList);
 	return quoteEfficiency(trackList, combined);
     }
-    protected double quotePurity(List<Track> trackList, List<Cluster> clusterList) {
+    protected double quotePurity(List<Track> trackList, Collection<Cluster> clusterList) {
 	Cluster combined = makeCombinedCluster(clusterList);
 	return quotePurity(trackList, combined);
     }
@@ -1116,6 +1548,35 @@
 	double denom = clus.getCalorimeterHits().size();
 	return num/denom;
     }
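+    // Truth helper: assign each hit to the MCParticle with the largest energy
+    // contribution, then return the particle that dominates the most hits.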
+    protected MCParticle quoteDominantParticle(Cluster clus) {
+	List<CalorimeterHit> hits = clus.getCalorimeterHits();
+	Map<MCParticle, List<SimCalorimeterHit>> truthMap = new HashMap<MCParticle, List<SimCalorimeterHit>>();
+	for (CalorimeterHit hit : hits) {
+	    SimCalorimeterHit simhit = (SimCalorimeterHit) (hit);
+	    int maxContrib = -1;
+	    for (int i=0; i<simhit.getMCParticleCount(); i++) {
+		double e = simhit.getContributedEnergy(i);
+		if (maxContrib < 0 || e > simhit.getContributedEnergy(maxContrib)) {
+		    maxContrib = i;
+		}
+	    }
+	    MCParticle maxParticle = simhit.getMCParticle(maxContrib);
+	    List<SimCalorimeterHit> hitsOfThisParticle = truthMap.get(maxParticle);
+	    if (hitsOfThisParticle == null) {
+		hitsOfThisParticle = new Vector<SimCalorimeterHit>();
+		truthMap.put(maxParticle, hitsOfThisParticle);
+	    }
+	    hitsOfThisParticle.add(simhit);
+	}
+	MCParticle dominant = null;
+	for (MCParticle p : truthMap.keySet()) {
+	    if (dominant == null || truthMap.get(p).size() > truthMap.get(dominant).size()) {
+		dominant = p;
+	    }
+	}
+	return dominant;
+    }
 
     Set<CalorimeterHit> findHitsFromTruth(List<Track> trackList, List<CalorimeterHit> hits) {
 	Set<CalorimeterHit> outputHits = new HashSet<CalorimeterHit>();
@@ -1217,6 +1678,14 @@
 	protected Cluster c1 = null;
 	protected Cluster c2 = null;
     }
+    private class ScoredLink extends Link {
+	double m_score;
+	public ScoredLink(Cluster clus1, Cluster clus2, double score) {
+	    super(clus1, clus2);
+	    m_score = score;
+	}
+	double score() { return m_score; }
+    }
 
     private class Path {
 	Vector<Cluster> m_steps;
@@ -1284,7 +1753,7 @@
 				    }
 				    printme += " = "+score;
 				}
-				System.out.println(printme);
+				//System.out.println(printme);
 				// Sanity check:
 				Set<CalorimeterHit> usedHits = new HashSet<CalorimeterHit>();
 				for (Cluster step : newPath.getSteps()) {
@@ -1576,4 +2045,407 @@
 	m_event.put(m_outputParticleListName, outputParticleList);
     }
 
+
+    void buildNeutrals(Map<Cluster,Double> mapSeedsToThresholds,
+		       Map<Cluster,Cluster> mapHitsToSeeds,
+		       Map<Cluster,Cluster> mapSmallClustersToSeeds,
+		       Map<Cluster,Cluster> mapLargeClustersToSeeds,
+		       Map<Cluster,Cluster> mapPhotonClustersToSeeds,
+		       Map<Cluster,Cluster> mapMipsToSeeds,
+		       Map<Cluster,Cluster> mapClumpsToSeeds,
+		       Map<List<Cluster>, Map<Cluster,Cluster>> targetMap,
+		       Collection<Cluster> seeds,
+		       List<Cluster> mips,
+		       List<Cluster> clumps,
+		       List<Cluster> skeletonClusters,
+		       Map<Cluster,Double> seedClusterTrackEnergy,
+		       Map<Cluster, Cluster> mapAttachSmallClusters,
+		       Map<Cluster, Cluster> mapAttachHits)
+    {
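+	// In progress (see commit message): only small-cluster attachments are merged
+	// into unassigned photon/large clusters so far; mapAttachHits is accepted but
+	// not yet used.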
+	// Figure out which are assigned to seeds and which are not...
+	List<Cluster> unassignedPhotonClusters = new Vector<Cluster>();
+	List<Cluster> unassignedSmallClusters = new Vector<Cluster>();
+	List<Cluster> unassignedLargeClusters = new Vector<Cluster>();
+	List<Cluster> unassignedMips = new Vector<Cluster>();
+	List<Cluster> unassignedClumps = new Vector<Cluster>();
+	List<Cluster> unassignedHits = new Vector<Cluster>();
+
+	for (Cluster clus : mapPhotonClustersToSeeds.keySet()) { if (mapPhotonClustersToSeeds.get(clus) == null) { unassignedPhotonClusters.add(clus); } }
+	for (Cluster clus : mapSmallClustersToSeeds.keySet()) { if (mapSmallClustersToSeeds.get(clus) == null) { unassignedSmallClusters.add(clus); } }
+	for (Cluster clus : mapLargeClustersToSeeds.keySet()) { if (mapLargeClustersToSeeds.get(clus) == null) { unassignedLargeClusters.add(clus); } }
+	for (Cluster clus : mapMipsToSeeds.keySet()) { if (mapMipsToSeeds.get(clus) == null) { unassignedMips.add(clus); } }
+	for (Cluster clus : mapClumpsToSeeds.keySet()) { if (mapClumpsToSeeds.get(clus) == null) { unassignedClumps.add(clus); } }
+	for (Cluster clus : mapHitsToSeeds.keySet()) { if (mapHitsToSeeds.get(clus) == null) { unassignedHits.add(clus); } }
+
+	Map<Cluster, List<Cluster>> mergedPhotonClusters = new HashMap<Cluster, List<Cluster>>();
+	Map<Cluster, List<Cluster>> mergedLargeClusters = new HashMap<Cluster, List<Cluster>>();
+	for (Cluster clus : unassignedPhotonClusters) {
+	    List<Cluster> mergedList = new Vector<Cluster> ();
+	    mergedPhotonClusters.put(clus, mergedList);
+	}
+	for (Cluster clus : unassignedLargeClusters) {
+	    List<Cluster> mergedList = new Vector<Cluster> ();
+	    mergedLargeClusters.put(clus, mergedList);
+	}
+
+	for (Cluster clus : mapAttachSmallClusters.keySet()) {
+	    Cluster target = mapAttachSmallClusters.get(clus);
+	    if (unassignedPhotonClusters.contains(target)) { mergedPhotonClusters.get(target).add(clus); }
+	    if (unassignedLargeClusters .contains(target)) { mergedLargeClusters .get(target).add(clus); }
+	}
+
+	System.out.println("DEBUG: After merge, we made "+mergedPhotonClusters.size()+" photons and "+unassignedLargeClusters.size()+" neutral hadrons");
+    }
+
+    void writeClusters(String name, Collection<Cluster> seeds, Map<Cluster,Double> seedClusterTrackEnergy, Map<Cluster,Cluster> mapPhotonClustersToSeeds, Map<Cluster,Cluster> mapSmallClustersToSeeds, Map<Cluster,Cluster> mapLargeClustersToSeeds, Map<Cluster,Cluster> mapMipsToSeeds, Map<Cluster,Cluster> mapClumpsToSeeds, Map<Cluster,Cluster> mapHitsToSeeds, Map<Cluster,List<Track>> clustersMatchedToTracks, Map<Cluster,Double> mapSeedsToThresholds) {
+	List<Cluster> outputList = new Vector<Cluster>();
+	for (Cluster seed : seeds) {
+	    List<Cluster> matchedPhotons = reverseMap(seed, mapPhotonClustersToSeeds);
+	    List<Cluster> matchedSmallClusters = reverseMap(seed, mapSmallClustersToSeeds);
+	    List<Cluster> matchedLargeClusters = reverseMap(seed, mapLargeClustersToSeeds);
+	    List<Cluster> matchedMips = reverseMap(seed, mapMipsToSeeds);
+	    List<Cluster> matchedClumps = reverseMap(seed, mapClumpsToSeeds);
+	    List<Cluster> matchedHits = reverseMap(seed, mapHitsToSeeds);
+
+	    List<Cluster> combinedClusters = new Vector<Cluster>();
+	    combinedClusters.addAll(matchedPhotons);
+	    combinedClusters.addAll(matchedSmallClusters);
+	    combinedClusters.addAll(matchedLargeClusters);
+	    combinedClusters.addAll(matchedMips);
+	    combinedClusters.addAll(matchedClumps);
+	    combinedClusters.addAll(matchedHits);
+
+
+	    Cluster combined = makeCombinedCluster(combinedClusters);
+	    outputList.add(combined);
+	}
+	m_event.put(name, outputList);
+    }
+
+    ///////////////////////////////////////
+
+    private void debugPrint(List<Cluster> targets, SharedClusterGroup sharedClusters, String targetName, String shareName) {
+	for (Cluster target : targets) {
+	    List<SharedCluster> shares = sharedClusters.findContributingSharedClusters(target); 
+	    if (shares != null) {
+		double weightSum = 0.0; 
+		for (SharedCluster share : shares) { 
+		    Double weight = share.getNormalizedWeight(target);
+		    if (weight != null) {
+			weightSum += weight.doubleValue(); 
+		    } else {
+			throw new AssertionError("Null weight!");
+		    }
+		} 
+		System.out.println(targetName+" cluster with "+target.getCalorimeterHits().size()+" has "+shares.size()+" shared "+shareName+" hits [weight sum "+weightSum+"]."); 
+	    }
+	}
+    }
+    private class MomentumSort implements Comparator<Track> {
+	public MomentumSort() {}
+	public int compare(Track t1, Track t2) {
+	    double[] p1 = t1.getMomentum();
+	    double[] p2 = t2.getMomentum();
+	    double p1mag2 = p1[0]*p1[0] + p1[1]*p1[1] + p1[2]*p1[2];
+	    double p2mag2 = p2[0]*p2[0] + p2[1]*p2[1] + p2[2]*p2[2];
+	    if (p1mag2 < p2mag2) { 
+		return -1;
+	    } else if (p1mag2 > p2mag2) {
+		return 1;
+	    } else {
+		return 0;
+	    }
+	}
+    }
+    private class ScoredLinkSort implements Comparator<ScoredLink> {
+	public ScoredLinkSort() {}
+	public int compare(ScoredLink t1, ScoredLink t2) {
+	    if (t1.score() < t2.score()) {
+		return -1;
+	    } else if (t1.score() > t2.score()) {
+		return 1;
+	    } else {
+		return 0;
+	    }
+	}
+    }
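+    // Breadth-first closure: starting from the far end of firstLink, repeatedly
+    // absorb any cluster reachable through links scoring at least as high as the
+    // seeding link, skipping clusters already claimed as shower components.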
+    private Set<Cluster> probeFullPropagation(Set<Cluster> allShowerComponents, ScoredLink firstLink, Cluster miniSeed, List<ScoredLink> potentialLinks) {
+	Set<Cluster> output = new HashSet<Cluster>();
+	Cluster counterpart = firstLink.counterpart(miniSeed);
+	double threshold = firstLink.score();
+	output.add(counterpart);
+	int numAddedLastPass = 1;
+	while (numAddedLastPass > 0) {
+	    Set<Cluster> clustersToAddThisPass = new HashSet<Cluster>();
+	    for (Cluster clus : output) {
+		for (ScoredLink testLink : potentialLinks) {
+		    if (testLink.contains(clus) && testLink.score() >= threshold) { 
+			// Link involves a cluster we've added and has score >= threshold
+			// so it may point to something we should add.
+			// (Test is >= rather than > because of case where both are 1.0)
+			Cluster potentialNewCluster = testLink.counterpart(clus);
+			if (!allShowerComponents.contains(potentialNewCluster) && !output.contains(potentialNewCluster) && !clustersToAddThisPass.contains(potentialNewCluster)) {
+			    // We haven't used this one yet -- should add it.
+			    clustersToAddThisPass.add(potentialNewCluster);
+			}
+		    }
+		}
+	    }
+	    // Done with this pass -- if we didn't add anything, we'll stop.
+	    numAddedLastPass = clustersToAddThisPass.size();
+	    output.addAll(clustersToAddThisPass);
+	}
+	return output;
+    }
+
+    ///////////////////////////////////////
+
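+    // Shared-energy bookkeeping: each shared cluster contributes the weighted
+    // difference between the calibrated energy with and without it, so non-linear
+    // calibration is handled by m_calib rather than by naively summing hits.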
+    double deltaEnergy(Cluster mainCluster, Cluster sharedCluster, double weight) {
+	double energyWithoutSharedCluster = energy(mainCluster);
+	List<Cluster> both = new Vector<Cluster>();
+	both.add(mainCluster);
+	both.add(sharedCluster);
+	double energyWithSharedCluster = energy(both);
+	return weight * (energyWithSharedCluster-energyWithoutSharedCluster);
+    }
+    double deltaEnergy(Cluster mainCluster, SharedClusterGroup shares) {
+	List<SharedCluster> sharedClusters = shares.listAllSharedClusters();
+	Set<Cluster> subClusters = recursivelyFindSubClusters(mainCluster);
+	double sumDeltaEnergy = 0.0;
+	for (SharedCluster sharedCluster : sharedClusters) {
+	    double sumWeightForThisSharedCluster = 0.0;
+	    Set<Cluster> targetsInSharedCluster = sharedCluster.getTargetClusters();
+	    for (Cluster targetInSharedCluster : targetsInSharedCluster) {
+		if (subClusters.contains(targetInSharedCluster)) {
+		    sumWeightForThisSharedCluster += sharedCluster.getNormalizedWeight(targetInSharedCluster);
+		}
+	    }
+	    double deltaEnergyForThisSharedCluster = deltaEnergy(mainCluster, sharedCluster.getCluster(), sumWeightForThisSharedCluster);
+	    sumDeltaEnergy += deltaEnergyForThisSharedCluster;
+	}
+	return sumDeltaEnergy;
+    }
+    double energy(Collection<Cluster> mainClusterList, List<SharedClusterGroup> listOfShares) {
+	Cluster mainCluster = makeCombinedCluster(mainClusterList);
+	return energy(mainCluster, listOfShares);
+    }
+    double energy(Cluster mainCluster, List<SharedClusterGroup> listOfShares) {
+	double sumDeltaEnergy = 0.0;
+	for (SharedClusterGroup share : listOfShares) {
+	    sumDeltaEnergy += deltaEnergy(mainCluster, share);
+	}
+	return energy(mainCluster) + sumDeltaEnergy;
+    }
+    private interface ClusterSharingAlgorithm {
+	public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets);
+    }
+    private class SharedClusterGroup {
+	List<SharedCluster> m_sharedClusters;
+	Map<Cluster, List<SharedCluster>> m_hints;
+	ClusterSharingAlgorithm m_algorithm = null;
+	// Setup
+	public SharedClusterGroup(List<Cluster> clustersToShare, ClusterSharingAlgorithm algorithm) {
+	    // Don't initialize hints map -- that gets created when we do rebuildHints()
+	    m_sharedClusters = new Vector<SharedCluster>();
+	    m_algorithm = algorithm;
+	    for (Cluster clusterToShare : clustersToShare) {
+		m_sharedClusters.add(new SharedCluster(clusterToShare));
+	    }
+	}
+	// Create shares based on algorithm
+	public void createShares(List<Cluster> clusterTargets) {
+	    for (SharedCluster clusterToShare : m_sharedClusters) {
+		Map<Cluster,Double> shares = m_algorithm.shareCluster(clusterToShare.getCluster(), clusterTargets);
+		for (Cluster target : shares.keySet()) {
+		    clusterToShare.addShare(target, shares.get(target));
+		}
+	    }
+	}
+	// Rebuild hints table (used for lookups)
+	public void rebuildHints() {
+	    m_hints = new HashMap<Cluster, List<SharedCluster>>();
+	    for (SharedCluster share : m_sharedClusters) {
+		Set<Cluster> targets = share.getTargetClusters();
+		for (Cluster target : targets) {
+		    List<SharedCluster> matchedShares = m_hints.get(target);
+		    if (matchedShares == null) {
+			matchedShares = new Vector<SharedCluster>();
+			m_hints.put(target, matchedShares);
+		    }
+		    matchedShares.add(share);
+		}
+	    }
+	}
+	// Look up shares corresponding to a target cluster, relying on cache.
+	public List<SharedCluster> findContributingSharedClusters(Cluster target) {
+	    return m_hints.get(target);
+	}
+	// List target clusters with contributions:
+	public Set<Cluster> findTargets() {
+	    return m_hints.keySet();
+	}
+	// List all shares
+	public List<SharedCluster> listAllSharedClusters() {
+	    return m_sharedClusters;
+	}
+    }
+    private class SharedCluster {
+	Cluster m_rawCluster = null;
+	Map<Cluster, Double> m_sharedBetween = null;
+	double m_sumOfWeights = 0.0;
+	public SharedCluster(Cluster rawCluster) {
+	    m_rawCluster = rawCluster;
+	    m_sumOfWeights = 0.0;
+	    m_sharedBetween = new HashMap<Cluster,Double>();
+	}
+	public void addShare(Cluster target, double weight) {
+	    m_sharedBetween.put(target, new Double(weight));
+	    m_sumOfWeights += weight;
+	}
+	public Cluster getCluster() { return m_rawCluster; }
+	public Set<Cluster> getTargetClusters() {
+	    return m_sharedBetween.keySet();
+	}
+	public Double getRawWeight(Cluster target) {
+	    Double weight = m_sharedBetween.get(target);
+	    if (weight != null) {
+		if (weight <= 0.0) { throw new AssertionError("ERROR: Invalid weight = "+weight); }
[truncated at 1000 lines; 138 more skipped]
CVSspam 0.2.8