Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDriver.java  +1189 -1407  1.4 -> 1.5
MJC: Rewrite of reclustering driver

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.4 -> 1.5
diff -u -r1.4 -r1.5
--- ReclusterDriver.java	10 Nov 2007 02:10:11 -0000	1.4
+++ ReclusterDriver.java	19 Nov 2007 22:19:02 -0000	1.5
@@ -1,6 +1,8 @@
 package org.lcsim.contrib.uiowa;
 
 import java.util.*; 
+import java.io.IOException; 
+import hep.aida.*;
 import hep.physics.vec.*;
 import hep.physics.particle.properties.*;
 import org.lcsim.util.*;
@@ -17,10 +19,29 @@
 import org.lcsim.recon.cluster.structural.likelihood.*;
 import org.lcsim.recon.cluster.structural.*;
 
+/**
+  * An algorithm to recluster showers using E/p information
+  * from tracks to constrain and direct the clustering.
+  * Also makes use of topological/geometric information.
+  * 
+  * This module assumes that quite a bit of preliminary
+  * clustering has already been done. See arguments to
+  * the constructor for details.
+  *
+  * This version is PRELIMINARY.
+  *
+  * @version $Id: ReclusterDriver.java,v 1.5 2007/11/19 22:19:02 mcharles Exp $
+  * @author Mat Charles
+  */
+
 public class ReclusterDriver extends Driver {
 
+    List<Cluster> m_mips;
+
     boolean m_megaDebug = false;
     boolean m_debug = true;
+    boolean m_debugTiming = false;
+    boolean m_debugLinkScores = true;
     String m_outputParticleListName = "ReclusteredParticles";
     String m_mcList;
     String m_inputSmallClusters;
@@ -50,12 +71,37 @@
 	m_inputClumps = clumps;
 	m_inputSplitSkeletonClusters = splitSkeletonClusters;
 
-	// For now, hard-coded
-	LocalHelixExtrapolationTrackClusterMatcher genMatch = new LocalHelixExtrapolationTrackClusterMatcher();
+	// Track-matching is complex. Use several matchers...
+	// Try matching with local helix extrap to MIP or generic cluster:
 	LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher();
+	LocalHelixExtrapolationTrackClusterMatcher genMatch = new LocalHelixExtrapolationTrackClusterMatcher();
+	DualActionTrackClusterMatcher localHelixMatchers = new DualActionTrackClusterMatcher(mipMatch, genMatch);
 	add(mipMatch);
 	add(genMatch);
-	m_trackClusterMatcher = new DualActionTrackClusterMatcher(mipMatch, genMatch);
+	// Try matching with full swimming to MIP or generic cluster:
+	SimpleTrackMIPClusterMatcher mipMatchSimple = new SimpleTrackMIPClusterMatcher();
+	SimpleTrackClusterMatcher genMatchSimple = new SimpleTrackClusterMatcher();
+	DualActionTrackClusterMatcher simpleMatchers = new DualActionTrackClusterMatcher(mipMatchSimple, genMatchSimple);
+	add(mipMatchSimple);
+	add(genMatchSimple);
+	// Combine:
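+	// (Assumed semantics: SequentialTrackClusterMatcher tries its matchers
+	// in the order added, falling back to the next if no match is found.)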
+	SequentialTrackClusterMatcher combinedTrackClusterMatcher = new SequentialTrackClusterMatcher();
+	combinedTrackClusterMatcher.addMatcher(localHelixMatchers);
+	combinedTrackClusterMatcher.addMatcher(simpleMatchers);
+	m_trackClusterMatcher = combinedTrackClusterMatcher;
+	
+	// Calibration
+	FuzzyNeutralHadronClusterEnergyCalculator neutralCalib = new FuzzyNeutralHadronClusterEnergyCalculator();
+	neutralCalib.setMinimumAllowedEnergy(0.0);
+	m_neutralCalib = neutralCalib;
+	ChargedHadronClusterEnergyCalculator chargedCalibration = new ChargedHadronClusterEnergyCalculator("mips", neutralCalib);
+	m_chargedCalib = chargedCalibration;
+	add(chargedCalibration);
+	FuzzyPhotonClusterEnergyCalculator photonCalib = new FuzzyPhotonClusterEnergyCalculator();
+	m_photonCalib = photonCalib;
+
+	// plots
+	initPlots();
     }
 
     public void process(EventHeader event) {
@@ -74,6 +120,8 @@
 	HitMap unusedHits = ((HitMap)(event.get(m_inputUnusedHits)));
 	List<Cluster> splitSkeletonClusters = event.get(Cluster.class, m_inputSplitSkeletonClusters);
 	
+	m_mips = mips;
+
 	System.out.println("DEBUG: ReclusterDriver read in "+trackList.size()+" tracks");
 	System.out.println("DEBUG: ReclusterDriver read in "+muonParticles.size()+" muons");
 	System.out.println("DEBUG: ReclusterDriver read in "+photonClusters.size()+" photons");
@@ -207,8 +255,17 @@
 	    // First try excluding the small clusters, which are often just single hits:
 	    Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, mixedClusters);
 	    if (matchedCluster == null) {
-		// Now retry with all:
+		// Now retry including the small clusters:
 		matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, mixedClustersAndHits);
+		if (matchedCluster == null) {
+		    // Now retry with single hits
+		    matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons);
+		    if (matchedCluster != null) {
+			// Better turn that into a small cluster!
+			wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons.remove(matchedCluster);
+			smallClustersWithoutSkeletons.add(matchedCluster);
+		    }
+		}
 	    }
 	    if (matchedCluster != null) {
 		tracksMatchedToClusters.put(tr, matchedCluster);
@@ -251,6 +308,42 @@
 		printme += " -- extrapolation point at x="+point.x()+", y="+point.y()+", z="+point.z()+", r="+r;
 	    }
 	    System.out.println(printme);
+	    if (point != null) {
+		System.out.println("Dumping ECAL hits from this particle:");
+		MCParticle trackTruth = ((BaseTrackMC)(tr)).getMCParticle();
+		List<CalorimeterHit> EcalBarrDigiHits = event.get(CalorimeterHit.class, "EcalBarrDigiHits");
+		List<CalorimeterHit> EcalEndcapDigiHits  = event.get(CalorimeterHit.class, "EcalEndcapDigiHits");
+		List<SimCalorimeterHit> EcalHits = new Vector<SimCalorimeterHit>();
+		for (CalorimeterHit hit : EcalBarrDigiHits) { EcalHits.add((SimCalorimeterHit)(hit)); }
+		for (CalorimeterHit hit : EcalEndcapDigiHits) { EcalHits.add((SimCalorimeterHit)(hit)); }
+		for (SimCalorimeterHit hit : EcalHits) {
+		    int mcCount = hit.getMCParticleCount();
+		    boolean matchedTruth = false;
+		    for (int i=0; i<mcCount; i++) {
+			MCParticle mc = hit.getMCParticle(i);
+			if (mc == trackTruth) {
+			    matchedTruth = true;
+			    break;
+			}
+		    }
+		    Hep3Vector hitPos = new BasicHep3Vector(hit.getPosition());
+		    Hep3Vector displacement = VecOp.sub(hitPos, point);
+		    double r = Math.sqrt(hitPos.x()*hitPos.x() + hitPos.y()*hitPos.y());
+		    double separation = displacement.magnitude();
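+		    // Dump only hits within 50 mm (assuming positions in mm) of the
+		    // track's extrapolation point -- a hard-coded debug window.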
+		    if (separation < 50.0) {
+			String collection = "Collection=";
+			for (Cluster clus : mips) { if (clus.getCalorimeterHits().contains(hit)) { collection += "MIPs,"; break; } }
+			for (Cluster clus : clumps) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Clumps,"; break; } }
+			for (Cluster clus : photonClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Photons,"; break; } }
+			for (Cluster clus : largeClustersWithoutSkeletons) { if (clus.getCalorimeterHits().contains(hit)) { collection += "LargeClusWithoutSkel,"; break; } }
+			for (Cluster clus : smallClustersWithoutSkeletons) { if (clus.getCalorimeterHits().contains(hit)) { collection += "SmallClusWithoutSkel,"; break; } }
+			if (unusedHits.values().contains(hit)) { collection += "UnusedHits,"; }
+			for (Cluster clus : largeClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "LargeClus,"; break; } }
+			for (Cluster clus : skeletonClusters) { if (clus.getCalorimeterHits().contains(hit)) { collection += "Skel,"; break; } }
+			System.out.println("   hit at r="+r+", z="+hitPos.z()+", x="+hitPos.x()+", y="+hitPos.y()+" with separation="+displacement.magnitude()+" -- in list "+collection);
+		    }
+		}
+	    }
 	}
 
 	System.out.println("DEBUG: Here are the "+tracksMatchedToClusters.keySet().size()+" matched tracks");
@@ -318,7 +411,7 @@
 	}
 
 	// OK. Now pick out those clusters that have something connected to them...
-	Collection<Cluster> seeds = clustersMatchedToTracks.keySet();
+	Set<Cluster> seeds = clustersMatchedToTracks.keySet();
 	Map<Cluster, Double> seedClusterTrackEnergy = new HashMap<Cluster,Double>();
 	for (Cluster seed : seeds) {
 	    List<Track> seedTracks = clustersMatchedToTracks.get(seed);
@@ -388,11 +481,11 @@
 	sharedHaloHits.rebuildHints();	
 	allSharedClusters.add(sharedHaloHits);
 	System.out.println("Checking for halo sharing... "+wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons.size()+" hits -> "+sharedHaloHits.listAllSharedClusters().size()+" SharedCluster objects");	
-	debugPrint(smallClusterSeeds, sharedHaloHits, "Small", "halo");
-	debugPrint(mips, sharedHaloHits, "MIP", "halo");
-	debugPrint(clumps, sharedHaloHits, "Clump", "halo");
-	debugPrint(photonClusters, sharedHaloHits, "Photon", "halo");
-	debugPrint(largeClustersWithoutSkeletons, sharedHaloHits, "Large", "halo");
+	//debugPrint(smallClusterSeeds, sharedHaloHits, "Small", "halo");
+	//debugPrint(mips, sharedHaloHits, "MIP", "halo");
+	//debugPrint(clumps, sharedHaloHits, "Clump", "halo");
+	//debugPrint(photonClusters, sharedHaloHits, "Photon", "halo");
+	//debugPrint(largeClustersWithoutSkeletons, sharedHaloHits, "Large", "halo");
 	// Share small clusters
 	ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
 	SharedClusterGroup sharedSmallClusters = new SharedClusterGroup(smallClustersExcludingSeedsEtc, proximityAlgForSmallClusters);
@@ -404,16 +497,20 @@
 	sharedSmallClusters.rebuildHints();
 	allSharedClusters.add(sharedSmallClusters);
 	System.out.println("Checking for small cluster sharing... "+smallClustersExcludingSeedsEtc.size()+" clusters -> "+sharedSmallClusters.listAllSharedClusters().size()+" SharedCluster objects");
-	debugPrint(smallClusterSeeds, sharedSmallClusters, "Small", "halo");
-	debugPrint(mips, sharedSmallClusters, "MIP", "halo");
-	debugPrint(clumps, sharedSmallClusters, "Clump", "halo");
-	debugPrint(photonClusters, sharedSmallClusters, "Photon", "halo");
-	debugPrint(largeClustersWithoutSkeletons, sharedSmallClusters, "Large", "halo");
+	//debugPrint(smallClusterSeeds, sharedSmallClusters, "Small", "halo");
+	//debugPrint(mips, sharedSmallClusters, "MIP", "halo");
+	//debugPrint(clumps, sharedSmallClusters, "Clump", "halo");
+	//debugPrint(photonClusters, sharedSmallClusters, "Photon", "halo");
+	//debugPrint(largeClustersWithoutSkeletons, sharedSmallClusters, "Large", "halo");
 	// Cache potential links (warning, there may be a lot!)
-	List<ScoredLink> potentialLinks = new Vector<ScoredLink>();
+	resetPotentialLinks();
 	// First look for mip-clump links, only working within a large, contiguous cluster:
 	double scaleFactorTrackToTrack = 1.0;
 	double scaleFactorTrackToClump = 1.0;
+	double scaleFactorTrackToSmallSeed = 1.0;
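+	// Likelihoods below are divided by these scale factors and then clamped
+	// to [0,1] to form link scores; all factors are currently 1.0.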
+
+	// Links between main components (clumps, mips) within a large cluster
+	// -------------------------------------------------------------------
 	System.out.println("DEBUG: There are "+largeClusters.size()+" large clusters in the event which can contain skeletons.");
 	for (Cluster largeClus : largeClusters) {
 	    List<Cluster> subClustersMips = new Vector<Cluster>();
@@ -424,7 +521,7 @@
 		    if (clumps.contains(subsubClus)) { subClustersClumps.add(subsubClus); }
 		}		
 	    }
-	    System.out.println("DEBUG: Large cluster with "+largeClus.getCalorimeterHits().size()+" hits contains "+subClustersMips.size()+" MIPs and "+subClustersClumps.size()+" clumps.");
+	    if (m_debugLinkScores) { System.out.println("DEBUG: Large cluster with "+largeClus.getCalorimeterHits().size()+" hits contains "+subClustersMips.size()+" MIPs and "+subClustersClumps.size()+" clumps."); }
 	    // Potential mip-mip links [careful -- don't double-count]
 	    for (int i=0; i<subClustersMips.size(); i++) {
 		Cluster mip1 = subClustersMips.get(i);
@@ -433,8 +530,9 @@
 		    double likelihood = m_eval.getLinkLikelihoodTrackToTrack(mip1,mip2);
 		    double score = likelihood / scaleFactorTrackToTrack;
 		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
-		    potentialLinks.add(new ScoredLink(mip1, mip2, score));
-		    System.out.println("DEBUG:   MIP["+mip1.getCalorimeterHits().size()+"] -- MIP["+mip2.getCalorimeterHits().size()+"]  = "+score);
+		    addPotentialLink(mip1, mip2, score);
+		    //if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip1.getCalorimeterHits().size()+"] -- MIP["+mip2.getCalorimeterHits().size()+"]  = "+score); }
+		    if (m_debugLinkScores) { debugPrintLink(mip1, mip2, "MIP", "MIP", score); }
 		}
 	    }
 	    // Potential mip-clump links
@@ -443,23 +541,203 @@
 		    double likelihood = m_eval.getLinkLikelihoodTrackToClump(mip,clump);
 		    double score = likelihood / scaleFactorTrackToClump;
 		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
-		    potentialLinks.add(new ScoredLink(mip, clump, score));
-		    System.out.println("DEBUG:   MIP["+mip.getCalorimeterHits().size()+"] -- Clump["+clump.getCalorimeterHits().size()+"]  = "+score);
+		    addPotentialLink(mip, clump, score);
+		    //if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip.getCalorimeterHits().size()+"] -- Clump["+clump.getCalorimeterHits().size()+"]  = "+score); }
+		    if (m_debugLinkScores) { debugPrintLink(mip, clump, "MIP", "Clump", score); }
+		}
+	    }
+	    // Some debug info
+	    if (m_debugLinkScores) { 
+		if (subClustersMips.size()+subClustersClumps.size()==1 || subClustersMips.size()==0) {
+		    for (Cluster clus : subClustersMips)   { System.out.println("DEBUG:   Unlinkable MIP["+clus.getCalorimeterHits().size()+"]"); }
+		    for (Cluster clus : subClustersClumps) { System.out.println("DEBUG:   Unlinkable Clump["+clus.getCalorimeterHits().size()+"]"); }
+		}
+	    }
+	}
+
+	// Potential links to other things (harder to pin down...)
+	// -------------------------------------------------------
+
+	// Link from a MIP to another MIP/clump which is *not* in the same large cluster.
+	// This can happen if:
+	//    1) One is part of a secondary shower
+	//    2) Linkage across the ECAL/HCAL boundary failed
+	// MIP <--> MIP
+	for (int i=0; i<mips.size(); i++) {
+	    Cluster mip1 = mips.get(i);
+	    for (int j=i+1; j<mips.size(); j++) {
+		Cluster mip2 = mips.get(j);
+		boolean linkAlreadyExists = checkForLink(mip1, mip2);
+		if (!linkAlreadyExists) {
+		    double likelihood = m_eval.getLinkLikelihoodTrackToTrack(mip1,mip2);
+		    double score = likelihood / scaleFactorTrackToTrack;
+		    // Penalty factor for going outside main cluster, based on angular separation 
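+		    // The dot product of the unit position vectors is the cosine of
+		    // the opening angle between the clusters as seen from the IP; the
+		    // 0.8 cap forces at least a 20% score reduction for such links.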
+		    Hep3Vector clus1Position = new BasicHep3Vector(mip1.getPosition());
+		    Hep3Vector clus2Position = new BasicHep3Vector(mip2.getPosition());
+		    double penaltyFactor = VecOp.dot(VecOp.unit(clus1Position), VecOp.unit(clus2Position));
+		    if (penaltyFactor > 0.8) { penaltyFactor = 0.8; }
+		    score *= penaltyFactor;
+		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
+		    addPotentialLink(mip1, mip2, score);
+		    //if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip1.getCalorimeterHits().size()+"] -- MIP["+mip2.getCalorimeterHits().size()+"]  = "+score+"  (not in same large cluster)"); }
+		    if (m_debugLinkScores) { debugPrintLink(mip1, mip2, "MIP", "MIP", score); }
+		}
+	    }
+	}
+	// MIP <--> Clump
+	for (Cluster mip : mips) {
+	    for (Cluster clump : clumps) {
+		boolean linkAlreadyExists = checkForLink(mip, clump);
+		if (!linkAlreadyExists) {
+		    double likelihood = m_eval.getLinkLikelihoodTrackToClump(mip,clump);
+		    double score = likelihood / scaleFactorTrackToClump;
+		    // Penalty factor for going outside main cluster, based on angular separation 
+		    Hep3Vector clus1Position = new BasicHep3Vector(mip.getPosition());
+		    Hep3Vector clus2Position = new BasicHep3Vector(clump.getPosition());
+		    double penaltyFactor = VecOp.dot(VecOp.unit(clus1Position), VecOp.unit(clus2Position));
+		    if (penaltyFactor > 0.8) { penaltyFactor = 0.8; }
+		    score *= penaltyFactor;
+		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
+		    addPotentialLink(mip, clump, score);
+		    //if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip.getCalorimeterHits().size()+"] -- Clump["+clump.getCalorimeterHits().size()+"]  = "+score); }
+		    if (m_debugLinkScores) { debugPrintLink(mip, clump, "MIP", "Clump", score); }
+		}
+	    }
+	}	
+
+	// Link from MIP to a nearby cluster (use pointing)
+	for (Cluster mip : mips) {
+	    double thresholdForProximity = 50.0; // 5cm. FIXME: Don't hard-code.
+	    // MIP <--> small cluster seed
+	    for (Cluster clus : smallClusterSeeds) {
+		double score = scoreOnProximityAndPointing(mip, clus, thresholdForProximity);
+		addPotentialLink(mip, clus, score);
+		//if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip.getCalorimeterHits().size()+"] -- SmallSeed["+clus.getCalorimeterHits().size()+"]  = "+score); }
+		if (m_debugLinkScores) { debugPrintLink(mip, clus, "MIP", "SmallSeed", score); }
+		if (m_debugLinkScores) { 
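+		    // Re-derive the score by hand for the printout: the inverse-square
+		    // distance penalty only kicks in (penaltyFactor < 1) once the
+		    // separation exceeds thresholdForProximity.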
+		    double likelihood = m_eval.getLinkLikelihoodTrackToClump(mip,clus);
+		    double dist = proximity(mip, clus);
+		    double scaledDistance = dist / thresholdForProximity;
+		    double penaltyFactor = 1.0 / (scaledDistance * scaledDistance);
+		    double recalculatedScore = likelihood;
+		    if (penaltyFactor < 1.0) {
+			recalculatedScore *= penaltyFactor;
+		    }
+		    System.out.println("DEBUG:      proximity = "+dist+" and threshold = "+thresholdForProximity+" => penaltyFactor = "+penaltyFactor);
+		    System.out.println("DEBUG:      likelihood = "+likelihood+" => combined score = "+recalculatedScore);
+		}
+	    }
+	    // MIP <--> photon
+	    for (Cluster clus : photonClusters) {
+		boolean photonIsSeed = seeds.contains(clus);
+		double score = scoreOnProximityAndPointing(mip, clus, thresholdForProximity);
+		if (photonIsSeed) {
+		    addPotentialLink(mip, clus, score);
+		    if (m_debugLinkScores) { debugPrintLink(mip, clus, "MIP", "Photon", score); }
+		} else {
+		    String printme = new String("DEBUG:   Skipped making link from MIP["+mip.getCalorimeterHits().size()+"] -- Photon["+clus.getCalorimeterHits().size()+"] because photon is not a seed.");
+		    MCParticle domPart1 = quoteDominantParticle(mip);
+		    MCParticle domPart2 = quoteDominantParticle(clus);
+		    if (domPart1 == domPart2) {
+			int domPDG = domPart1.getPDGID();
+			double domMom = domPart1.getMomentum().magnitude();
+			printme += " MISTAKE! They both come from "+domPDG+" with p="+domMom;
+			double likelihood = m_eval.getLinkLikelihoodTrackToClump(mip,clus);
+			double distance = proximity(mip, clus);
+			printme += ". Score would have been "+score+" since likelihood="+likelihood+" and proximity="+distance;
+		    }
+		    System.out.println(printme);
+		}
+	    }
+	    // MIP <--> large cluster
+	    for (Cluster clus : largeClustersWithoutSkeletons) {
+		double score = scoreOnProximityAndPointing(mip, clus, thresholdForProximity);
+		addPotentialLink(mip, clus, score);
+		//if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip.getCalorimeterHits().size()+"] -- LargeClus["+clus.getCalorimeterHits().size()+"]  = "+score); }
+		if (m_debugLinkScores) { debugPrintLink(mip, clus, "MIP", "LargeClus", score); }
+	    }
+	}
+	// Link certain things based on proximity and angle when there is
+	// little other information. Use these quantities:
+	//   1) Proximity
+	//   2) Dot product of these vectors: (IP to inner clus) and (inner clus to outer clus)
+	// For genuinely large clusters (e.g. clumps with 50 hits) we also have pointing information
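+	// (As re-derived in the debug block below: score = (threshold/distance)^2
+	// multiplied by the dot product, floored at zero for backward-pointing pairs.)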
+	for (Cluster smallSeed : smallClusterSeeds) {
+	    Hep3Vector smallSeedPosition = new BasicHep3Vector(smallSeed.getPosition());
+	    double thresholdForProximity = 50.0; // 5cm. FIXME: Don't hard-code.
+	    // Small seeds <--> clumps
+	    for (Cluster clus : clumps) {
+		double score = scoreOnProximityAndAngle(smallSeed, clus, thresholdForProximity);
+		addPotentialLink(smallSeed, clus, score);
+		//if (m_debugLinkScores) { System.out.println("DEBUG:   SmallSeed["+smallSeed.getCalorimeterHits().size()+"] -- Clump["+clus.getCalorimeterHits().size()+"]  = "+score); }
+		if (m_debugLinkScores) { debugPrintLink(smallSeed, clus, "SmallSeed", "Clump", score); }
+	    }
+	    // Small seeds <--> photons
+	    for (Cluster clus : photonClusters) {
+		double score = scoreOnProximityAndAngle(smallSeed, clus, thresholdForProximity);
+		addPotentialLink(smallSeed, clus, score);
+		//if (m_debugLinkScores) { System.out.println("DEBUG:   SmallSeed["+smallSeed.getCalorimeterHits().size()+"] -- Photon["+clus.getCalorimeterHits().size()+"]  = "+score); }
+		if (m_debugLinkScores) { debugPrintLink(smallSeed, clus, "SmallSeed", "Photon", score); }
+		if (m_debugLinkScores) { 
+		    double dist = proximity(smallSeed, clus);
+		    double scaledDist = dist / thresholdForProximity;
+		    double proximityScore = 1.0 / (scaledDist * scaledDist);
+		    Hep3Vector clus1Position = new BasicHep3Vector(smallSeed.getPosition());
+		    Hep3Vector clus2Position = new BasicHep3Vector(clus.getPosition());
+		    Hep3Vector positionOfInnerCluster = null;
+		    Hep3Vector positionOfOuterCluster = null;
+		    if (clus2Position.magnitude() > clus1Position.magnitude()) {
+			positionOfInnerCluster = clus1Position;
+			positionOfOuterCluster = clus2Position;
+		    } else {
+			positionOfInnerCluster = clus2Position;
+			positionOfOuterCluster = clus1Position;
+		    }
+		    Hep3Vector displacementVector = VecOp.sub(positionOfOuterCluster, positionOfInnerCluster);
+		    double dotProduct = VecOp.dot(VecOp.unit(displacementVector), VecOp.unit(positionOfInnerCluster));
+		    double dotProductScore = dotProduct;
+		    if (dotProduct < 0.0) { dotProductScore = 0.0; }
+		    double recalculatedScore = proximityScore * dotProductScore;
+		    System.out.println("DEBUG:     proximity="+dist+" and threshold="+thresholdForProximity+" => proximityScore="+proximityScore);
+		    System.out.println("DEBUG:     dotProduct="+dotProduct+" => score="+recalculatedScore);
 		}
 	    }
+	    // Small seeds <--> large clusters
+	    for (Cluster clus : largeClustersWithoutSkeletons) {
+		double score = scoreOnProximityAndAngle(smallSeed, clus, thresholdForProximity);
+		addPotentialLink(smallSeed, clus, score);
+		//if (m_debugLinkScores) { System.out.println("DEBUG:   SmallSeed["+smallSeed.getCalorimeterHits().size()+"] -- LargeClus["+clus.getCalorimeterHits().size()+"]  = "+score); }
+		if (m_debugLinkScores) { debugPrintLink(smallSeed, clus, "SmallSeed", "LargeClus", score); }
+	    }
+	}
+	// Clump <--> clump
+	for (int i=0; i<clumps.size(); i++) {
+	    double thresholdForProximity = 75.0; // FIXME: Don't hard-code.
+	    Cluster clump1 = clumps.get(i);
+	    for (int j=i+1; j<clumps.size(); j++) {
+		Cluster clump2 = clumps.get(j);
+		double score = scoreOnProximityAndAngle(clump1, clump2, thresholdForProximity);
+		score *= 0.8; // Arbitrary penalty [FIXME]
+		addPotentialLink(clump1, clump2, score);
+		//if (m_debugLinkScores) { System.out.println("DEBUG:   Clump["+clump1.getCalorimeterHits().size()+"] -- Clump["+clump2.getCalorimeterHits().size()+"]  = "+score); }
+		if (m_debugLinkScores) { debugPrintLink(clump1, clump2, "Clump", "Clump", score); }
+	    }
+	}
+	// Clump <--> large cluster
+	for (Cluster clump : clumps) {
+	    double thresholdForProximity = 75.0; // FIXME: Don't hard-code.
+	    for (Cluster clus : largeClustersWithoutSkeletons) {
+		double score = scoreOnProximityAndAngle(clump, clus, thresholdForProximity);
+		score *= 0.8; // Arbitrary penalty [FIXME]
+		addPotentialLink(clump, clus, score);
+		//if (m_debugLinkScores) { System.out.println("DEBUG:   Clump["+clump.getCalorimeterHits().size()+"] -- LargeClus["+clus.getCalorimeterHits().size()+"]  = "+score); }
+		if (m_debugLinkScores) { debugPrintLink(clump, clus, "Clump", "LargeClus", score); }
+	    }
 	}
-	// Potential links to other things...
-	//
-	// ...
-	//
 	// [other legitimate link types]
 	// [...]
 	// OK. Now sort the potential links by score:
-	Collections.sort(potentialLinks, Collections.reverseOrder(new ScoredLinkSort()));
-	System.out.println("Rerecluster: There are "+potentialLinks.size()+" potential links:");
-	for (ScoredLink link : potentialLinks) {
-	    //System.out.println("  Link with score="+link.score());
-	}
+	sortLinks();
 	// Try to build skeletons, working with tracks in momentum-sorted order...
 	Map<Track,Double> newMapTrackToThreshold = new HashMap<Track,Double>(); // for geometrical checks
 	Map<Track,Double> newMapTrackToTolerance = new HashMap<Track,Double>(); // for E/p checks
@@ -470,30 +748,44 @@
 	Map<Cluster, Track> newMapShowerComponentToTrack = null;
 	Map<Track, Set<Cluster>> newMapTrackToShowerComponents = null;
 	Map<Track, Set<Cluster>> newMapTrackToVetoedAdditions = null;
-	for (int iIter=0; iIter<5; iIter++) {
+	long timeAtStartOfFirstEventIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
+	for (int iIter=0; iIter<10; iIter++) {
+	    long timeAtStartOfEventIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
 	    newMapShowerComponentToTrack = new HashMap<Cluster, Track>();
 	    newMapTrackToShowerComponents = new HashMap<Track, Set<Cluster>>();
 	    newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
+	    long timeAtStartOfEventTracksIteration= Calendar.getInstance().getTimeInMillis(); // DEBUG
 	    for (Track tr : tracksSortedByMomentum) {
 		Set<Cluster> vetoedAdditions = new HashSet<Cluster>();
-		Set<Cluster> showerComponents = iterateOnOneTrack(tr, tracksMatchedToClusters, potentialLinks, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditions);
+		long timeAtStartOfTrackIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
+		Set<Cluster> showerComponents = iterateOnOneTrack(tr, tracksMatchedToClusters, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditions);
+		long timeAtEndOfTrackIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
+		if (m_debugTiming) { System.out.println("DEBUG: iterateOnOneTrack() took "+(timeAtEndOfTrackIteration-timeAtStartOfTrackIteration)+" ms."); }
 		newMapTrackToShowerComponents.put(tr, showerComponents);
 		for (Cluster clus : showerComponents) {
 		    newMapShowerComponentToTrack.put(clus, tr);
 		}
 		newMapTrackToVetoedAdditions.put(tr, vetoedAdditions);
 	    }
+	    long timeAtEndOfEventTracksIteration= Calendar.getInstance().getTimeInMillis(); // DEBUG
+	    if (m_debugTiming) { System.out.println("DEBUG: In event-level iteration "+iIter+", looping over "+tracksSortedByMomentum.size()+" tracks took "+(timeAtEndOfEventTracksIteration-timeAtStartOfEventTracksIteration)+" ms (plus "+(timeAtStartOfEventTracksIteration-timeAtStartOfEventIteration)+" ms prep time)"); }
+
 	    // Look for tracks which
 	    //   * Have E/p significantly below 1, and/or
 	    //   * Could be linked to a cluster but were vetoed, PROVIDED THAT the cluster was not put in any other shower
 	    Set<Track> tracksWithEoverPTooLow = new HashSet<Track>();
 	    Set<Track> tracksWithVetoedLinkToUnusedCluster = new HashSet<Track>();
 
-	    printStatus("SUMMARY FOR THIS ITERATION:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, photonClusters, mips, clumps, largeClustersWithoutSkeletons, newMapTrackToVetoedAdditions);
+	    //printStatus("SUMMARY FOR THIS ITERATION:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, photonClusters, mips, clumps, largeClustersWithoutSkeletons, smallClusterSeeds, newMapTrackToVetoedAdditions);
 
+	    long timeBeforeComputingResiduals = Calendar.getInstance().getTimeInMillis(); // DEBUG
 	    for (Track tr : tracksSortedByMomentum) {
+		long timeAtStartOfThisTrackResidualCheck = Calendar.getInstance().getTimeInMillis(); // DEBUG
 		Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+		long timeAtStartOfThisTrackResidualCheck_EnergyCalculation = Calendar.getInstance().getTimeInMillis(); // DEBUG
 		double clusterEnergy = energy(showerComponents, allSharedClusters);
+		long timeAtEndOfThisTrackResidualCheck_EnergyCalculation = Calendar.getInstance().getTimeInMillis(); // DEBUG
+		if (m_debugTiming) { System.out.println("DEBUG: Computing residuals for this track: Energy calculation took "+(timeAtEndOfThisTrackResidualCheck_EnergyCalculation-timeAtStartOfThisTrackResidualCheck_EnergyCalculation)+" ms"); }
 		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
 		double sigma = 0.7 * Math.sqrt(trackMomentum);
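+		// Assumes a stochastic hadron energy resolution of ~70%/sqrt(E),
+		// giving sigma in GeV for a track of momentum trackMomentum.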
 		double normalizedResidual = (clusterEnergy-trackMomentum)/sigma;
@@ -501,7 +793,11 @@
 		    // FIXME: This cut-off shouldn't be hard-coded
 		    tracksWithEoverPTooLow.add(tr);
 		}
+		long timeAtEndOfThisTrackResidualCheck = Calendar.getInstance().getTimeInMillis(); // DEBUG
+		if (m_debugTiming) { System.out.println("DEBUG: Computing residuals for this track took "+(timeAtEndOfThisTrackResidualCheck-timeAtStartOfThisTrackResidualCheck)+" ms"); }
 	    }
+	    long timeBeforeLookingForVetoedLinks = Calendar.getInstance().getTimeInMillis(); // DEBUG
+	    if (m_debugTiming) { System.out.println("DEBUG: Computing residuals for all tracks took "+(timeBeforeLookingForVetoedLinks-timeBeforeComputingResiduals)+" ms (plus "+(timeBeforeComputingResiduals-timeAtEndOfEventTracksIteration)+" ms prep time)"); }
 	    for (Cluster clus : linkableClusters) {
 		if (newMapShowerComponentToTrack.get(clus) == null) {
 		    Set<Track> tracksVetoedForThisCluster = new HashSet<Track>();
@@ -513,741 +809,302 @@
 		    
 		}
 	    }
+
+	    // Adjust tolerance (how wrong in E/p we'll allow a cluster addition to make us)
+	    // and threshold (how low in score we'll go for adding a link)
+	    double maximumAllowedTolerance = 2.5;
+	    double minimumAllowedThreshold = 0.3;
+	    boolean stateChange = false;
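+	    // Tolerance only ratchets up (+0.25 per pass, capped at 2.5) and
+	    // threshold only down (-0.05 per pass, floored at 0.3); if neither
+	    // changes, stateChange stays false and the iteration ends early.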
+
+	    long timeBeforeLookingForTracksWithVetoedLinksToUnusedClusters = Calendar.getInstance().getTimeInMillis(); // DEBUG
+	    if (m_debugTiming) { System.out.println("DEBUG: Looking for vetoed links took "+(timeBeforeLookingForTracksWithVetoedLinksToUnusedClusters-timeBeforeLookingForVetoedLinks)+" ms"); }
 	    for (Track tr : tracksWithVetoedLinkToUnusedCluster) {
 		Double oldTolerance = newMapTrackToTolerance.get(tr);
 		double newTolerance = oldTolerance.doubleValue() + 0.25;
-		newMapTrackToTolerance.put(tr, newTolerance);
+		if (newTolerance <= maximumAllowedTolerance) {
+		    newMapTrackToTolerance.put(tr, newTolerance);
+		    stateChange = true;
+		}
 	    }
+	    long timeBeforeAdjustingTrackThresholds = Calendar.getInstance().getTimeInMillis(); // DEBUG
+	    if (m_debugTiming) { System.out.println("DEBUG: Updating track tolerances took "+(timeBeforeAdjustingTrackThresholds-timeBeforeLookingForTracksWithVetoedLinksToUnusedClusters)+" ms"); }
 	    for (Track tr : tracksWithEoverPTooLow) {
 		Double oldThreshold = newMapTrackToThreshold.get(tr);
 		double newThreshold = oldThreshold.doubleValue() - 0.05;
-		newMapTrackToThreshold.put(tr, new Double(newThreshold));
+		if (newThreshold >= minimumAllowedThreshold) {
+		    newMapTrackToThreshold.put(tr, new Double(newThreshold));
+		    stateChange = true;
+		}
 	    }
-	    // NOW: Go back and recluster with lower/higher threshold if below/above target energy.
-	}
-
-	printStatus("FINAL STATUS:",
-		    tracksSortedByMomentum,
-		    allSharedClusters,
-		    newMapTrackToShowerComponents,
-		    newMapShowerComponentToTrack,
-		    newMapTrackToThreshold,
-		    newMapTrackToTolerance,
-		    photonClusters, mips, clumps, largeClustersWithoutSkeletons,
-		    newMapTrackToVetoedAdditions
-		    );
-	////////////////////////////////////////////////////////////////////////
-
-	// Set up initial thresholds
-	Map<Cluster,Double> mapSeedsToThresholds = new HashMap<Cluster,Double>();
-	for (Cluster seed : seeds) {
-	    mapSeedsToThresholds.put(seed, new Double(0.8));
-	}
+	    long timeAtEndOfEventIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
+	    if (m_debugTiming) { System.out.println("DEBUG: Updating track thresholds took "+(timeAtEndOfEventIteration-timeBeforeAdjustingTrackThresholds)+" ms"); }
+	    if (m_debugTiming) { System.out.println("DEBUG: Updates of track/cluster info took "+(timeAtEndOfEventIteration-timeAtEndOfEventTracksIteration)+" ms, so total time for event-level iteration "+iIter+" was "+(timeAtEndOfEventIteration-timeAtStartOfEventIteration)+" ms"); }
 
-	// Need to add other clusters to them somewhat intelligently...
-
-	Map<Cluster,Cluster> mapPhotonClustersToSeeds = new HashMap<Cluster,Cluster>();
-	Map<Cluster,Cluster> mapSmallClustersToSeeds = new HashMap<Cluster,Cluster>();
-	Map<Cluster,Cluster> mapLargeClustersToSeeds = new HashMap<Cluster,Cluster>();
-	Map<Cluster,Cluster> mapMipsToSeeds = new HashMap<Cluster,Cluster>();
-	Map<Cluster,Cluster> mapClumpsToSeeds = new HashMap<Cluster,Cluster>();
-	Map<Cluster,Cluster> mapHitsToSeeds = new HashMap<Cluster,Cluster>();
-	
-	for (Cluster clus : photonClusters) {
-	    if (seeds.contains(clus)) {
-		mapPhotonClustersToSeeds.put(clus,clus);
-	    } else {
-		mapPhotonClustersToSeeds.put(clus,null);
-	    }
-	}
-	for (Cluster clus : smallClusters) { 
-	    if (seeds.contains(clus)) {
-		mapSmallClustersToSeeds.put(clus,clus);
-	    } else {
-		mapSmallClustersToSeeds.put(clus,null);
-	    }
-	}
-	for (Cluster clus : largeClustersWithoutSkeletons) { 
-	    if (seeds.contains(clus)) {
-		mapLargeClustersToSeeds.put(clus,clus);
-	    } else {
-		mapLargeClustersToSeeds.put(clus,null);
-	    }
-	}
-	for (Cluster clus : mips) { 
-	    if (seeds.contains(clus)) {
-		mapMipsToSeeds.put(clus,clus);
-	    } else {
-		mapMipsToSeeds.put(clus,null);
-	    }
-	}
-	for (Cluster clus : clumps) { 
-	    if (seeds.contains(clus)) {
-		mapClumpsToSeeds.put(clus,clus);
-	    } else {
-		mapClumpsToSeeds.put(clus,null);
-	    }
-	}
-	for (Cluster clus : wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons) { 
-	    if (seeds.contains(clus)) {
-		mapHitsToSeeds.put(clus,clus);
-	    } else {
-		mapHitsToSeeds.put(clus,null);
+	    if (!stateChange) {
+		// Reached steady state => stop
+		break;
 	    }
 	}
+	long timeAtEndOfLastEventIteration = Calendar.getInstance().getTimeInMillis(); // DEBUG
+	if (m_debugTiming) { System.out.println("DEBUG: Iterating multiple times on all tracks in event took a grand total of "+(timeAtEndOfLastEventIteration-timeAtStartOfFirstEventIteration)+" ms"); }
 
-
-
-
-
-	//printStatus("Initial state:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
-	writeClusters("recluster00", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
-
-	// First pass: assign unambiguous split skeletons
-	for (Cluster skel : splitSkeletonClusters) {
-	    List<Cluster> seedsInSkeleton = new Vector<Cluster>();
-	    for (Cluster clus : skel.getClusters()) {
-		if (seeds.contains(clus)) {
-		    seedsInSkeleton.add(clus);
+	// Look for cases where we can see something went wrong and fix it.
+	// The easiest instance is where:
+	//   * A cluster C is not connected to any track
+	//   * The best potential link for cluster C was to a track T
+	//   * The E/p for track T is currently too low
+	//   * The E/p for track T would be OK if cluster C was added
+	// We could also generalize this to a group of clusters (C1+C2+...+Cn) to be connected to T.
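+	// NOTE: the over-ride pass is currently disabled (the flag below is
+	// forced to true), so this while-loop body never executes.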
+	boolean noOverridesOnPreviousPass = false;
+	noOverridesOnPreviousPass = true; // DISABLE OVER-RIDES
+	while (!noOverridesOnPreviousPass) {
+	    noOverridesOnPreviousPass = true;
+	    for (Cluster clus : linkableClusters) {
+		if (photonClusters.contains(clus)) {
+		    // Don't eat photons
+		    continue;
 		}
-	    }
-	    if (seedsInSkeleton.size() == 1) {
-		// unique
-		Cluster seed = seedsInSkeleton.get(0);
-		for (Cluster clus : skel.getClusters()) {
-		    if (mips.contains(clus)) {
-			mapMipsToSeeds.put(clus, seed);
-		    } else if (clumps.contains(clus)) {
-			mapClumpsToSeeds.put(clus, seed);
-		    } else { 
-			throw new AssertionError("Internal consistency failure");
+		if (newMapShowerComponentToTrack.get(clus) == null) {
+		    // Cluster with no tracks connected.
+		    MCParticle domPartOfClus = quoteDominantParticle(clus);
+		    int domPDG = domPartOfClus.getPDGID();
+		    double domMom = domPartOfClus.getMomentum().magnitude();
+		    System.out.println("DEBUG: Considering a link over-ride for cluster ["+clus.getCalorimeterHits().size()+" hits] with no track.");
+		    // What was the best-scoring link available?
+		    List<ScoredLink> potentialLinks = m_potentialLinks.get(clus);
+		    if (potentialLinks != null && potentialLinks.size()>0) {
+			ScoredLink bestLink = potentialLinks.get(0);
+			Cluster matchedClusterOfBestLink = bestLink.counterpart(clus);
+			Track trackOfMatchedClusterOfBestLink = newMapShowerComponentToTrack.get(matchedClusterOfBestLink);
+			System.out.println("DEBUG: Potential link from cluster ["+clus.getCalorimeterHits().size()+" hits] to a cluster ["+matchedClusterOfBestLink.getCalorimeterHits().size()+" hits] with score="+bestLink.score());
+			if (trackOfMatchedClusterOfBestLink != null) {
+			    // OK, there's a track. We might have vetoed this for E/p before... check that
+			    double trackMomentum = new BasicHep3Vector(trackOfMatchedClusterOfBestLink.getMomentum()).magnitude();
+			    if (newMapTrackToVetoedAdditions.get(trackOfMatchedClusterOfBestLink).contains(clus)) {
+				// We vetoed it before -- ignore it for now
+				System.out.println("DEBUG: Potential link from cluster ["+clus.getCalorimeterHits().size()+" hits] to a cluster ["+matchedClusterOfBestLink.getCalorimeterHits().size()+" hits] vetoed due to E/p");
+			    } else {
+				// OK, good to go.
+				Set<Cluster> currentShowerOfTrack = newMapTrackToShowerComponents.get(trackOfMatchedClusterOfBestLink);
+				Set<Cluster> newShowerOfTrack = new HashSet<Cluster>(currentShowerOfTrack);
+				newShowerOfTrack.add(clus);
+				double energyOfExistingShower = energy(currentShowerOfTrack, allSharedClusters);
+				double energyOfNewShower = energy(newShowerOfTrack, allSharedClusters);
+				double tolerance = newMapTrackToTolerance.get(trackOfMatchedClusterOfBestLink).doubleValue();
+				System.out.println("DEBUG: Potential link from cluster ["+clus.getCalorimeterHits().size()+" hits] to a cluster ["+matchedClusterOfBestLink.getCalorimeterHits().size()+" hits] would shift energy "+energyOfExistingShower+" --> "+energyOfNewShower+" (p="+trackMomentum+")");
+				if (testEoverP(energyOfNewShower, trackOfMatchedClusterOfBestLink, tolerance)) {
+				    // OK, add it
+				    newMapTrackToShowerComponents.put(trackOfMatchedClusterOfBestLink, newShowerOfTrack);
+				    newMapShowerComponentToTrack.put(clus, trackOfMatchedClusterOfBestLink);
+				    noOverridesOnPreviousPass = false;
+				    MCParticle trackTruth = ((BaseTrackMC)(trackOfMatchedClusterOfBestLink)).getMCParticle();
+				    String mistake = new String(""); // Default: not a mistake
+				    if (domPartOfClus != trackTruth) { 
+					mistake += " -- MISTAKE"; 
+					boolean clusterComesFromReconstructedTrack = false;
+					for (Track eachTrack : tracksMatchedToClusters.keySet()) {
+					    MCParticle truthForEachTrack = ((BaseTrackMC)(eachTrack)).getMCParticle();
+					    if (domPartOfClus == truthForEachTrack) {
+						clusterComesFromReconstructedTrack = true;
+						break;
+					    }
+					}
+					if (clusterComesFromReconstructedTrack) {
+					    mistake += " [but no cost]";
+					}
+				    }
+				    System.out.println("DEBUG: Over-rode scoring to make a link: Track with p="+trackMomentum+" to clus with "+clus.getCalorimeterHits().size()+" hits from "+domPDG+" with p="+domMom+mistake);
+				}
+			    }
+			}
 		    }
 		}
 	    }
 	}
-
-	//printStatus("After first pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
-	writeClusters("recluster01", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
-
-	// Second pass: attach small clusters and hits to nearby seeds OR skeleton components OR large clusters OR photon clusters
-	// Create maps from small thing to big thing
-	double threshold = 50.0;
-	Map<Cluster, Cluster> mapAttachSmallClusters = new HashMap<Cluster,Cluster>();
-	Map<Cluster, Cluster> mapAttachHits = new HashMap<Cluster,Cluster>();
-	Map<List<Cluster>, Map<Cluster,Cluster>> targetMap = new HashMap<List<Cluster>, Map<Cluster,Cluster>>(); // e.g. from "mips" to "mapMipsToSeeds"
-	targetMap.put(mips, mapMipsToSeeds);
-	targetMap.put(clumps, mapClumpsToSeeds);
-	targetMap.put(largeClustersWithoutSkeletons, mapLargeClustersToSeeds);
-	targetMap.put(photonClusters, mapPhotonClustersToSeeds);
-	List<Cluster> targets = new Vector<Cluster>();
-	for (List<Cluster> targetList : targetMap.keySet()) {
-	    targets.addAll(targetList);
-	}
-
-	for (Cluster clus : smallClusters) {
-	    double minDist = -1.0;
-	    Cluster nearest = null;
-	    for (Cluster target : targets) {
-		double dist = proximity(clus, target);
-		if (nearest == null || dist < minDist) {
-		    minDist = dist;
-		    nearest = target;
-		}
-	    }
-	    if (minDist < threshold) {
-		mapAttachSmallClusters.put(clus, nearest);
-	    }
-	}
-	for (Cluster clus : wrappedUnusedHitsIgnoringLargeClustersWithoutSkeletons) {
-	    double minDist = -1.0;
-	    Cluster nearest = null;
-	    for (Cluster target : targets) {
-		double dist = proximity(clus, target);
-		if (nearest == null || dist < minDist) {
-		    minDist = dist;
-		    nearest = target;
-		}
-	    }
-	    if (minDist < threshold) {
-		mapAttachHits.put(clus, nearest);
-	    }
-	}
-
-	for (Cluster clus : mapAttachSmallClusters.keySet()) {
-	    Cluster target = mapAttachSmallClusters.get(clus);
-	    for (List<Cluster> targetList : targetMap.keySet()) {
-		if (targetList.contains(target)) {
-		    mapSmallClustersToSeeds.put(clus, targetMap.get(targetList).get(target)); // maps clus to [seed of [target of [clus] ] ]
-		}
-	    }
-	}
-
-	for (Cluster clus : mapAttachHits.keySet()) {
-	    Cluster target = mapAttachHits.get(clus);
-	    for (List<Cluster> targetList : targetMap.keySet()) {
-		if (targetList.contains(target)) {
-		    mapHitsToSeeds.put(clus, targetMap.get(targetList).get(target)); // maps clus to [seed of [target of [clus] ] ]
-		}
-	    }
-	}
-
-	//printStatus("After second pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);	
-	writeClusters("recluster02", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);	
-
-	// SCORING?
-	//
-	// Isolated hit to anything: Distance (1/r2), possibly relative position
-	// Small cluster to anything: Distance (1/r2), possibly relative position
-	// Skeleton component to skeleton component: tricky...
-	// Skeleton component to large cluster: tricky...
-	// Skeleton component to photon cluster: tricky...
-	// Large cluster to photon cluster or vice-versa: ?
-	//
-	// For small things, indirect proximity (small thing -> big thing -> seed) is OK.
-	// For big things, indirect proximity (big thing -> other big thing -> seed) is explosive.
-	//
-	// For skeleton components:
-	//   * Assign a score to each pair (e.g. MIP <--> CLUMP)
-	//   * For each valid route from seed, take product of scores,
-	//        e.g. route 1: SEED <--> CLUMP = 0.4
-	//        e.g. route 2: SEED <--> MIP <--> CLUMP = 0.9 * 1.0
-	//   * Take maximum of scores.
-	//   * For each pair, score caps at 1 -- e.g. likelihood in (1.0, 0.9) => score = 1, likelihood in (0.9, 0.0) => score = likelihood/0.9
-	//   * This approach ensures that score for each indirectly linked cluster is
-	//     always <= score for best path to get there. So if the distant cluster
-	//     is above threshold, we are guaranteed a path to it is also above threshold.
-	//
-	// To keep things somewhat bounded, need to impose distance cut-offs.
-	// Scoring normalization: 1.0 indicates a link that you'd definitely want to make. Cap at 1.0
-	//
-	// We also need to watch out for a "swamping" problem.
-	// Sometimes it makes sense to add in a subcluster even if it takes the total cluster energy
-	// over nominal. (Maybe that link is better and some of the current ones are bad.) But if the
-	// energy of the cluster to be added is obviously crazy (e.g. such that seed + new cluster alone
-	// would go out of bounds) then we should clearly not add it.
-
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	buildNeutrals(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy, mapAttachSmallClusters, mapAttachHits); // TEST
-	//printStatus("After third pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("recluster03", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	//printStatus("After fourth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("recluster04", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	//printStatus("After fifth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("recluster05", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	//printStatus("After sixth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("recluster06", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	//printStatus("After seventh pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("recluster07", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	//printStatus("After eighth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
-	writeClusters("recluster08", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	//printStatus("After ninth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("recluster09", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
-	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
-	//printStatus("After tenth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("recluster10", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	// Last step needs to be to stuff in extra hits/clusters if they'll reasonably fit into
-	// a nearby charged cluster, heedless of thresholds.
-
-	fillIn(seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds);
-	//printStatus("Final charged state:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-	writeClusters("reclusterFinalCharged", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
-
-	// ... now flesh out neutral hadrons (needs to be done)
-
-	// ... now write out!
-	writeOut(seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks);
-	writeClusters("reclusterFinalAll", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 	
 
-	System.out.println("RECLUSTER: ALL DONE");
-	m_event = null;
-    }
-
-    void adjustThresholds(Map<Cluster,Double> mapSeedsToThresholds,
-			  Map<Cluster,Cluster> mapHitsToSeeds,
-			  Map<Cluster,Cluster> mapSmallClustersToSeeds,
-			  Map<Cluster,Cluster> mapLargeClustersToSeeds,
-			  Map<Cluster,Cluster> mapPhotonClustersToSeeds,
-			  Map<Cluster,Cluster> mapMipsToSeeds,
-			  Map<Cluster,Cluster> mapClumpsToSeeds,
-			  Collection<Cluster> seeds,
-			  Map<Cluster,Double> seedClusterTrackEnergy)
-    {
-	for (Cluster seed : seeds) {
-	    List<Cluster> matchedPhotons = reverseMap(seed, mapPhotonClustersToSeeds);
-	    List<Cluster> matchedSmallClusters = reverseMap(seed, mapSmallClustersToSeeds);
-	    List<Cluster> matchedLargeClusters = reverseMap(seed, mapLargeClustersToSeeds);
-	    List<Cluster> matchedMips = reverseMap(seed, mapMipsToSeeds);
-	    List<Cluster> matchedClumps = reverseMap(seed, mapClumpsToSeeds);
-	    List<Cluster> matchedHits = reverseMap(seed, mapHitsToSeeds);
-
-	    List<Cluster> combinedClusters = new Vector<Cluster>();
-	    combinedClusters.addAll(matchedPhotons);
-	    combinedClusters.addAll(matchedSmallClusters);
-	    combinedClusters.addAll(matchedLargeClusters);
-	    combinedClusters.addAll(matchedMips);
-	    combinedClusters.addAll(matchedClumps);
-	    combinedClusters.addAll(matchedHits);
-
-	    double clusterEnergy = energy(combinedClusters);	   
-	    double trackEnergy = seedClusterTrackEnergy.get(seed);
-	    double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
-	    double normalisedResidual = (clusterEnergy - trackEnergy)/trackEnergySigma;
-
-	    Double currentThreshold = mapSeedsToThresholds.get(seed);
-	    double newThreshold = currentThreshold;
-
-	    if ( normalisedResidual > 1.0) {
-		// Cluster is too big => increase threshold
-		double baseScale = 1.0 - currentThreshold;
-		if (baseScale > 0.2) { baseScale = 0.2; }
-		double fractionToMove = (normalisedResidual / 10.0);
-		if (fractionToMove > 0.5) { fractionToMove = 0.5; }
-		newThreshold += (baseScale * fractionToMove);
-	    } else if (normalisedResidual < -1.0) {
-		// Too small => decrease threshold
-		double baseScale = 1.0 - currentThreshold;
-		if (baseScale > 0.2) { baseScale = 0.2; }
-		double fractionToMove = (-normalisedResidual / 10.0);
-		if (fractionToMove > 0.5) { fractionToMove = 0.5; }
-		newThreshold -= (baseScale * fractionToMove);
-	    }
-
-	    mapSeedsToThresholds.put(seed, newThreshold);
-	}    
-    }
-
-    void iterateOnce(Map<Cluster,Double> mapSeedsToThresholds,
-		     Map<Cluster,Cluster> mapHitsToSeeds,
-		     Map<Cluster,Cluster> mapSmallClustersToSeeds,
-		     Map<Cluster,Cluster> mapLargeClustersToSeeds,
-		     Map<Cluster,Cluster> mapPhotonClustersToSeeds,
-		     Map<Cluster,Cluster> mapMipsToSeeds,
-		     Map<Cluster,Cluster> mapClumpsToSeeds,
-		     Map<List<Cluster>, Map<Cluster,Cluster>> targetMap,
-		     Collection<Cluster> seeds,
-		     List<Cluster> mips,
-		     List<Cluster> clumps,
-		     List<Cluster> skeletonClusters,
-		     Map<Cluster,Double> seedClusterTrackEnergy
-		     ) {
-	// Compute scores
-
-	double const_hitScoreThreshold = 30.0;
-	double const_hitScoreScale = 60.0;
-	double const_hitMaxAllowedDistance = 200.0;
-
-	//System.out.println("DEBUG: Scoring isolated hits --> seeds based on proximity...");
-	Map<Cluster, Map<Cluster,Double>> isolatedHitToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapHitsToSeeds.keySet(), targetMap, seeds);
-	//System.out.println("DEBUG: Scoring small clusters --> seeds based on proximity...");
-	Map<Cluster, Map<Cluster,Double>> smallClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapSmallClustersToSeeds.keySet(), targetMap, seeds);
-	//Map<Cluster, Map<Cluster,Double>> photonClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapPhotonClustersToSeeds.keySet(), targetMap, seeds);
-	//System.out.println("DEBUG: Scoring mips --> seeds based on likelihood...");
-	Map<Cluster, Map<Cluster,Double>> mipToSeedScoreMap = scoreOnLikelihood(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
-	//System.out.println("DEBUG: Scoring clumps --> seeds based on likelihood...");
-	Map<Cluster, Map<Cluster,Double>> clumpToSeedScoreMap = scoreOnLikelihood(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
-	//System.out.println("DEBUG: Scoring large clusters --> seeds based on likelihood...");
-	Map<Cluster, Map<Cluster,Double>> largeClusterToSeedScoreMap = scoreOnProximityAndAngle(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapLargeClustersToSeeds.keySet(), mapMipsToSeeds, mapClumpsToSeeds, mipToSeedScoreMap, clumpToSeedScoreMap, mapSeedsToThresholds, targetMap, seeds);
-
-	//System.out.println("DEBUG: Updating mappings...");
-	// Update mappings based on scores
-	updateMappings(isolatedHitToSeedScoreMap, mapHitsToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
-	updateMappings(smallClusterToSeedScoreMap, mapSmallClustersToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
-	updateMappings(largeClusterToSeedScoreMap, mapLargeClustersToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
-	//updateMappings(photonClusterToSeedScoreMap, mapPhotonClustersToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
-	updateMappings(mipToSeedScoreMap, mapMipsToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
-	updateMappings(clumpToSeedScoreMap, mapClumpsToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
-    }
-
-    protected void updateMappings(Map<Cluster, Map<Cluster,Double>> clusterToSeedScoreMap, Map<Cluster,Cluster> clusterToSeedMap, Map<Cluster,Double> mapSeedsToThresholds, Map<Cluster,Double> seedClusterTrackEnergy) {
-	Collection<Cluster> seeds = mapSeedsToThresholds.keySet();
-	for (Cluster clus : clusterToSeedMap.keySet()) {
-	    if (seeds.contains(clus)) { continue; } // don't do anything silly
-	    
-	    // Default: no match
-	    clusterToSeedMap.put(clus, null);
-
-	    double maxReducedScore = 0.0;
-	    Cluster bestSeedMatch = null;
-	    if (clus == null) { throw new AssertionError("null cluster"); }
-	    Map<Cluster,Double> scoreMap = clusterToSeedScoreMap.get(clus);
-	    if (scoreMap == null) { 
-		// No good matches for this cluster
-		continue;
-	    }
-	    for (Cluster seed : scoreMap.keySet()) {
-		Double seedScore = scoreMap.get(seed);
-		Double seedThreshold = mapSeedsToThresholds.get(seed);
-		double reducedScore = seedScore / seedThreshold;
-		if (seedScore < 0.0 || seedScore > 1.0) { throw new AssertionError("Invalid score of "+seedScore); }
-		if (reducedScore > 1.0) {
-		    if (bestSeedMatch==null || reducedScore > maxReducedScore) {
-			// Last-ditch sanity check
-			List<Cluster> seedPlusCandidate = new Vector<Cluster>();
-			seedPlusCandidate.add(seed);
-			seedPlusCandidate.add(clus);
-			double trackEnergy = seedClusterTrackEnergy.get(seed);
-			double clusterEnergy = energy(seedPlusCandidate);
-			double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
-			if (clusterEnergy - trackEnergy > 2.0*trackEnergySigma) {
[truncated at 1000 lines; 1961 more skipped]
CVSspam 0.2.8