Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDriver.java  +425 -56  (revision 1.2 -> 1.3)
MJC: Snapshot of code (not ready yet)

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ReclusterDriver.java	15 Oct 2007 07:16:13 -0000	1.2
+++ ReclusterDriver.java	16 Oct 2007 01:40:11 -0000	1.3
@@ -2,6 +2,7 @@
 
 import java.util.*; 
 import hep.physics.vec.*;
+import hep.physics.particle.properties.*;
 import org.lcsim.util.*;
 import org.lcsim.util.hitmap.*;
 import org.lcsim.event.*;
@@ -18,6 +19,7 @@
 
 public class ReclusterDriver extends Driver {
 
+    String m_outputParticleListName = "ReclusteredParticles";
     String m_mcList;
     String m_inputSmallClusters;
     String m_inputUnusedHits;
@@ -182,7 +184,8 @@
 	    }
 	}
 
-	// It can happen that some tracks don't match because they point to unused hits.
+	// It can happen that some tracks don't match because they point to unused hits,
+	// or they miss the calorimeter.
 	List<Track> unmatchedTracks = new Vector<Track>();
 	unmatchedTracks.addAll(trackList);
 	unmatchedTracks.removeAll(tracksMatchedToClusters.keySet());
@@ -256,7 +259,7 @@
 	// Set up initial thresholds
 	Map<Cluster,Double> mapSeedsToThresholds = new HashMap<Cluster,Double>();
 	for (Cluster seed : seeds) {
-	    mapSeedsToThresholds.put(seed, new Double(0.9));
+	    mapSeedsToThresholds.put(seed, new Double(0.8));
 	}
 
 	// Need to add other clusters to them somewhat intelligently...
@@ -282,7 +285,7 @@
 		mapSmallClustersToSeeds.put(clus,null);
 	    }
 	}
-	for (Cluster clus : largeClusters) { 
+	for (Cluster clus : largeClustersWithoutSkeletons) { 
 	    if (seeds.contains(clus)) {
 		mapLargeClustersToSeeds.put(clus,clus);
 	    } else {
@@ -350,7 +353,7 @@
 	Map<List<Cluster>, Map<Cluster,Cluster>> targetMap = new HashMap<List<Cluster>, Map<Cluster,Cluster>>(); // e.g. from "mips" to "mapMipsToSeeds"
 	targetMap.put(mips, mapMipsToSeeds);
 	targetMap.put(clumps, mapClumpsToSeeds);
-	targetMap.put(largeClusters, mapLargeClustersToSeeds);
+	targetMap.put(largeClustersWithoutSkeletons, mapLargeClustersToSeeds);
 	targetMap.put(photonClusters, mapPhotonClustersToSeeds);
 	List<Cluster> targets = new Vector<Cluster>();
 	for (List<Cluster> targetList : targetMap.keySet()) {
@@ -457,8 +460,20 @@
 	iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
 	printStatus("After seventh pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
 
+	// Last step needs to be to stuff in extra hits/clusters if they'll reasonably fit into
+	// a nearby charged cluster, heedless of thresholds.
+
+	fillIn(seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds);
+	printStatus("Final state:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);		
+
+	// ... now flesh out neutral hadrons (needs to be done)
+
 	// ... now write out!
+	writeOut(seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks);
+	
 
+	System.out.println("RECLUSTER: ALL DONE");
+	m_event = null;
     }
 
     void adjustThresholds(Map<Cluster,Double> mapSeedsToThresholds,
@@ -498,14 +513,14 @@
 	    if ( normalisedResidual > 1.0) {
 		// Cluster is too big => increase theshold
 		double baseScale = 1.0 - currentThreshold;
-		if (baseScale > 0.1) { baseScale = 0.1; }
+		if (baseScale > 0.2) { baseScale = 0.2; }
 		double fractionToMove = (normalisedResidual / 10.0);
 		if (fractionToMove > 0.5) { fractionToMove = 0.5; }
 		newThreshold += (baseScale * fractionToMove);
 	    } else if (normalisedResidual < -1.0) {
 		// Too small => decrease threshold
 		double baseScale = 1.0 - currentThreshold;
-		if (baseScale > 0.1) { baseScale = 0.1; }
+		if (baseScale > 0.2) { baseScale = 0.2; }
 		double fractionToMove = (-normalisedResidual / 10.0);
 		if (fractionToMove > 0.5) { fractionToMove = 0.5; }
 		newThreshold -= (baseScale * fractionToMove);
@@ -531,17 +546,23 @@
 		     ) {
 	// Compute scores
 
-	double const_hitScoreThreshold = 20.0;
-	double const_hitScoreScale = 40.0;
+	double const_hitScoreThreshold = 30.0;
+	double const_hitScoreScale = 60.0;
 	double const_hitMaxAllowedDistance = 200.0;
 
+	//System.out.println("DEBUG: Scoring isolated hits --> seeds based on proximity...");
 	Map<Cluster, Map<Cluster,Double>> isolatedHitToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapHitsToSeeds.keySet(), targetMap, seeds);
+	//System.out.println("DEBUG: Scoring small clusters --> seeds based on proximity...");
 	Map<Cluster, Map<Cluster,Double>> smallClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapSmallClustersToSeeds.keySet(), targetMap, seeds);
 	//Map<Cluster, Map<Cluster,Double>> photonClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapPhotonClustersToSeeds.keySet(), targetMap, seeds);
-	Map<Cluster, Map<Cluster,Double>> mipToSeedScoreMap = scoreOnLikelihood(mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
-	Map<Cluster, Map<Cluster,Double>> clumpToSeedScoreMap = scoreOnLikelihood(mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
-	Map<Cluster, Map<Cluster,Double>> largeClusterToSeedScoreMap = scoreOnProximityAndAngle(mapLargeClustersToSeeds.keySet(), mapMipsToSeeds, mapClumpsToSeeds, mipToSeedScoreMap, clumpToSeedScoreMap, mapSeedsToThresholds, targetMap, seeds);
-	
+	//System.out.println("DEBUG: Scoring mips --> seeds based on likelihood...");
+	Map<Cluster, Map<Cluster,Double>> mipToSeedScoreMap = scoreOnLikelihood(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
+	//System.out.println("DEBUG: Scoring clumps --> seeds based on likelihood...");
+	Map<Cluster, Map<Cluster,Double>> clumpToSeedScoreMap = scoreOnLikelihood(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
+	//System.out.println("DEBUG: Scoring large clusters --> seeds based on likelihood...");
+	Map<Cluster, Map<Cluster,Double>> largeClusterToSeedScoreMap = scoreOnProximityAndAngle(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapLargeClustersToSeeds.keySet(), mapMipsToSeeds, mapClumpsToSeeds, mipToSeedScoreMap, clumpToSeedScoreMap, mapSeedsToThresholds, targetMap, seeds);
+
+	//System.out.println("DEBUG: Updating mappings...");
 	// Update mappings based on scores
 	updateMappings(isolatedHitToSeedScoreMap, mapHitsToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
 	updateMappings(smallClusterToSeedScoreMap, mapSmallClustersToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
@@ -597,7 +618,8 @@
 	}		
     }
 
-    protected Map<Cluster, Map<Cluster,Double>> scoreOnLikelihood(List<Cluster> mips, List<Cluster> clumps, Map<List<Cluster>, Map<Cluster,Cluster>> targetMap, Map<Cluster,Double> mapSeedsToThresholds, List<Cluster> skeletonClusters)
+    protected Map<Cluster, Map<Cluster,Double>> scoreOnLikelihood(double const_hitScoreThreshold, double const_hitScoreScale, double const_hitMaxAllowedDistance, 
+								  List<Cluster> mips, List<Cluster> clumps, Map<List<Cluster>, Map<Cluster,Cluster>> targetMap, Map<Cluster,Double> mapSeedsToThresholds, List<Cluster> skeletonClusters)
     {
 	// Output is a map from each mip/clump to a scoremap
 	// where the scoremap is a map from each seed to a score.
@@ -611,7 +633,7 @@
 
 	double scaleFactorTrackToTrack = 1.0;
 	double scaleFactorTrackToClump = 1.0;
-	double scaleFactorClumpToClump = 1.0;
+	double scaleFactorClumpToClump = 10.0; // I dislike this type of link so will suppress it
 
 	for (int i=0; i<mips.size(); i++) {
 	    Cluster clus1 = mips.get(i);
@@ -629,10 +651,11 @@
 		if (score < 0.0) { score = 0.0; }
 		potentialTrackToTrackLinks.put(newLink, new Double(score));
 		potentialLinks.put(newLink, new Double(score));
-		//System.out.println("DEBUG: Init link from MIP ("+clus1.getCalorimeterHits().size()+" hits) to MIP ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToTrack+" = "+score);
+		System.out.println("DEBUG: Init link from MIP "+i+"/"+mips.size()+" ("+clus1.getCalorimeterHits().size()+" hits) to MIP "+j+"/"+mips.size()+" ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToTrack+" = "+score+"... proximity="+proximity(clus1,clus2));
 	    }
 	    // Next: Track-Clump links
-	    for (Cluster clus2 : clumps) {
+	    for (int j=0; j<clumps.size(); j++) {
+		Cluster clus2 = clumps.get(j);
 		if (!checkIfSkeletonComponentsClose(skeletonClusters, clus1, clus2)) { 
 		    // Too far apart => ignore
 		    continue;
@@ -644,28 +667,28 @@
 		if (score < 0.0) { score = 0.0; }
 		potentialTrackToClumpLinks.put(newLink, new Double(score));
 		potentialLinks.put(newLink, new Double(score));
-		//System.out.println("DEBUG: Init link from MIP ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToTrack+" = "+score);
+		System.out.println("DEBUG: Init link from MIP "+i+"/"+mips.size()+" ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP "+j+"/"+clumps.size()+" ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToClump+" = "+score+"... proximity="+proximity(clus1,clus2));
 	    }
 	}
 	// Finally: Clump-Clump links, again being careful of double-counting
-	for (int i=0; i<clumps.size(); i++) {
-	    Cluster clus1 = clumps.get(i);
-	    for (int j=i+1; j<clumps.size(); j++) {
-		Cluster clus2 = clumps.get(j);
-		if (!checkIfSkeletonComponentsClose(skeletonClusters, clus1, clus2)) { 
-		    // Too far apart => ignore
-		    continue;
-		}
-		Link newLink = new Link(clus1,clus2);
-		double likelihood = m_eval.getLinkLikelihoodClumpToClump(clus1,clus2);
-		double score = likelihood / scaleFactorClumpToClump;
-		if (score > 1.0) { score = 1.0; }
-		if (score < 0.0) { score = 0.0; }
-		potentialClumpToClumpLinks.put(newLink, new Double(score));
-		potentialLinks.put(newLink, new Double(score));
-		//System.out.println("DEBUG: Init link from CLUMP ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToTrack+" = "+score);
-	    }
-	}
+// 	for (int i=0; i<clumps.size(); i++) {
+// 	    Cluster clus1 = clumps.get(i);
+// 	    for (int j=i+1; j<clumps.size(); j++) {
+// 		Cluster clus2 = clumps.get(j);
+// 		if (!checkIfSkeletonComponentsClose(skeletonClusters, clus1, clus2)) { 
+// 		    // Too far apart => ignore
+// 		    continue;
+// 		}
+// 		Link newLink = new Link(clus1,clus2);
+// 		double likelihood = m_eval.getLinkLikelihoodClumpToClump(clus1,clus2);
+// 		double score = likelihood / scaleFactorClumpToClump;
+// 		if (score > 1.0) { score = 1.0; }
+// 		if (score < 0.0) { score = 0.0; }
+// 		potentialClumpToClumpLinks.put(newLink, new Double(score));
+// 		potentialLinks.put(newLink, new Double(score));
+// 		System.out.println("DEBUG: Init link from CLUMP "+i+"/"+clumps.size()+" ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP "+j+"/"+clumps.size()+" ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorClumpToClump+" = "+score+"... proximity="+proximity(clus1,clus2));
+// 	    }
+// 	}
 
 	// For convenience, make list of all components:
 	List<Cluster> mipsAndClumps = new Vector<Cluster>();
@@ -691,8 +714,8 @@
 		initialHopScore = 1.0;
 	    } else {
 		double minProximity = 0.0;
-		double x0 = 20.0; // const_hitScoreThreshold;
-		double x1 = x0 + 40.0; // const_hitScoreScale;
+		double x0 = const_hitScoreThreshold;
+		double x1 = x0 + const_hitScoreScale;
 		double b = (1.41421*x0 - x1) / (-2.41421);
 		double a = (x0+b)*(x0+b); 
 		for (Cluster component : mipsAndClumps) {
@@ -714,9 +737,10 @@
 	    zeroHopScores.put(zeroHopPath, new Double(initialHopScore));
 	    Map<Path,Double> previousMap = zeroHopScores;
 
+	    //System.out.println("DEBUG: About to start work on path map...");
 	    List<Map<Path,Double>> routes = new Vector<Map<Path,Double>>();
 	    while (true) {
-		Map<Path,Double> newMap = addHop(previousMap, potentialLinks, threshold);
+		Map<Path,Double> newMap = addHop(previousMap, potentialLinks, threshold, routes);
 		if (newMap.keySet().size()>0) {
 		    routes.add(newMap);
 		    previousMap = newMap;
@@ -725,6 +749,7 @@
 		    break;
 		}
 	    }
+	    //System.out.println("DEBUG: Done making path map. About to start scoring...");
 
 	    Map<Cluster,Double> scoreForEachDestination = new HashMap<Cluster,Double>();
 	    for (Map<Path,Double> route : routes) {
@@ -758,7 +783,8 @@
 	return outputMap;
     }
 
-    protected Map<Cluster, Map<Cluster,Double>> scoreOnProximityAndAngle(Collection<Cluster> inputClusters, 
+    protected Map<Cluster, Map<Cluster,Double>> scoreOnProximityAndAngle(double const_hitScoreThreshold, double const_hitScoreScale, double const_hitMaxAllowedDistance, 
+									 Collection<Cluster> inputClusters, 
 									 Map<Cluster,Cluster> mapMipsToSeeds, 
 									 Map<Cluster,Cluster> mapClumpsToSeeds, 
 									 Map<Cluster, Map<Cluster,Double>> mipToSeedScoreMap, 
@@ -777,9 +803,6 @@
 	Map<Cluster, Map<Cluster,Double>> outputMap = new HashMap<Cluster, Map<Cluster,Double>>();
 
 	// Here are some constants...
-	double const_hitScoreThreshold = 20.0;
-	double const_hitScoreScale = 40.0;
-	double const_hitMaxAllowedDistance = 200.0;
 	double x0 = const_hitScoreThreshold;
 	double x1 = x0 + const_hitScoreScale;
 	double b = (1.41421*x0 - x1) / (-2.41421);
@@ -921,12 +944,34 @@
 	    List<Cluster> matchedMips = reverseMap(seed, mapMipsToSeeds);
 	    List<Cluster> matchedClumps = reverseMap(seed, mapClumpsToSeeds);
 	    List<Cluster> matchedHits = reverseMap(seed, mapHitsToSeeds);
-	    if (matchedPhotons.size() > 0) { printme += matchedPhotons.size()+" photons; "; }
-	    if (matchedSmallClusters.size() > 0) { printme += matchedSmallClusters.size()+" smallClus; "; }
-	    if (matchedLargeClusters.size() > 0) { printme += matchedLargeClusters.size()+" largeClus; "; }
-	    if (matchedMips.size() > 0) { printme += matchedMips.size()+" mips; "; }
-	    if (matchedClumps.size() > 0) { printme += matchedClumps.size()+" clumps; "; }
-	    if (matchedHits.size() > 0) { printme += matchedHits.size()+" hits; "; }
+	    if (matchedPhotons.size() > 0) { 
+		int count = 0;
+		for (Cluster clus : matchedPhotons) { count += clus.getCalorimeterHits().size(); }
+		printme += matchedPhotons.size()+" photons ("+count+" hits); "; 
+	    }
+	    if (matchedSmallClusters.size() > 0) { 
+		int count = 0;
+		for (Cluster clus : matchedSmallClusters) { count += clus.getCalorimeterHits().size(); }
+		printme += matchedSmallClusters.size()+" smallClus ("+count+" hits); "; 
+	    }
+	    if (matchedLargeClusters.size() > 0) { 
+		int count = 0;
+		for (Cluster clus : matchedLargeClusters) { count += clus.getCalorimeterHits().size(); }
+		printme += matchedLargeClusters.size()+" largeClus ("+count+" hits); "; 
+	    }
+	    if (matchedMips.size() > 0) { 
+		int count = 0;
+		for (Cluster clus : matchedMips) { count += clus.getCalorimeterHits().size(); }
+		printme += matchedMips.size()+" mips ("+count+" hits); "; 
+	    }
+	    if (matchedClumps.size() > 0) { 
+		int count = 0;
+		for (Cluster clus : matchedClumps) { count += clus.getCalorimeterHits().size(); }
+		printme += matchedClumps.size()+" clumps ("+count+" hits); "; 
+	    }
+	    if (matchedHits.size() > 0) { 
+		printme += matchedHits.size()+" hits ("+matchedHits.size()+" hits); "; 
+	    }
 
 	    List<Cluster> combinedClusters = new Vector<Cluster>();
 	    combinedClusters.addAll(matchedPhotons);
@@ -941,7 +986,25 @@
 		printme += " (effic="+quoteEfficiency(trackList, combinedClusters)+", purity="+quotePurity(trackList, combinedClusters)+")";
 	    }
 
+	    // Crosscheck no double-counting
+	    Set<CalorimeterHit> hitsUsed = new HashSet<CalorimeterHit>();
+	    Set<CalorimeterHit> hitsDoubleCounted = new HashSet<CalorimeterHit>();
+	    for (Cluster clus : combinedClusters) {
+		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		    boolean alreadyPresent = hitsUsed.contains(hit);
+		    if (alreadyPresent) {
+			hitsDoubleCounted.add(hit);
+		    } else {
+			hitsUsed.add(hit);
+		    }
+		}
+	    }
+	    if (hitsDoubleCounted.size()>0) {
+		printme += " -- WARNING: "+hitsDoubleCounted.size()+" hits double-counted.";
+	    }
+		    
 	    System.out.println(printme);
+
 	}    
 
 	List<Cluster> unassignedPhotonClusters = new Vector<Cluster>();
@@ -993,11 +1056,14 @@
 	return output;
     }
 
+    protected double energy(Cluster clus) {
+	return m_calib.getEnergy(clus);
+    }
     protected double energy(List<Cluster> inputClusters) {
 	Cluster magicCombinedCluster = makeCombinedCluster(inputClusters);
-	return m_calib.getEnergy(magicCombinedCluster);
+	return energy(magicCombinedCluster);
     }
-    protected Cluster makeCombinedCluster(List<Cluster> inputClusters) {
+    protected BasicCluster makeCombinedCluster(List<Cluster> inputClusters) {
 	BasicCluster magicCombinedCluster = new BasicCluster();
 	for (Cluster clus : inputClusters) {
 	    magicCombinedCluster.addCluster(clus);
@@ -1162,7 +1228,7 @@
 	public void addSteps(List<Cluster> l) { for (Cluster c : l) { addStep(c); } }
     }
 	
-    protected Map<Path,Double> addHop(Map<Path,Double> previousHopScoreMap, Map<Link,Double> potentialLinks, Double threshold) {
+    protected Map<Path,Double> addHop(Map<Path,Double> previousHopScoreMap, Map<Link,Double> potentialLinks, Double threshold, List<Map<Path,Double>> routes) {
 	Map<Path,Double> newHopScores = new HashMap<Path,Double>();
 	for (Path oldPath : previousHopScoreMap.keySet()) {
 	    Cluster previousStep = oldPath.finalStep();
@@ -1177,10 +1243,57 @@
 			double score = scoreOfAllButLastHop * scoreOfLastHop;
 			if (score < 0.0 || score > 1.0) { throw new AssertionError("Invalid score: "+score); }
 			if (score > threshold) {
-			    Path newPath = new Path();
-			    newPath.addSteps(oldPath.getSteps());
-			    newPath.addStep(counterpart);
-			    newHopScores.put(newPath, new Double(score));
+			    // Check: Does a simpler, cheaper path exist?
+			    boolean veto = false;
+			    for (Map<Path,Double> route : routes) {
+				if (veto) { break; }
+				for (Path preExistingPath : route.keySet()) {
+				    Double scoreOfPreExistingPath = route.get(preExistingPath);
+				    if (scoreOfPreExistingPath >= score && preExistingPath.initialStep() == oldPath.initialStep() && preExistingPath.finalStep() == counterpart) {
+					// yes
+					veto = true;
+					break;
+				    }
+				}
+			    }
+			    if (!veto) {
+				Path newPath = new Path();
+				newPath.addSteps(oldPath.getSteps());
+				newPath.addStep(counterpart);
+				newHopScores.put(newPath, new Double(score));
+				String printme = new String();
+				printme += "DEBUG: Added a new path with "+newPath.getSteps().size()+" steps: ";
+				for (Cluster step : newPath.getSteps()) {
+				    printme += " -> [";
+				    printme += step.getCalorimeterHits().size();
+				    printme += "]";
+				}
+				printme += " so score = ";
+				{
+				    Cluster debugPrevStep = newPath.initialStep();
+				    for (Cluster debugCurrentStep : newPath.getSteps()) {
+					if (debugCurrentStep == newPath.initialStep()) { continue; }
+					for (Link potentialLink : potentialLinks.keySet()) {
+					    if (potentialLink.contains(debugCurrentStep) && potentialLink.contains(debugPrevStep)) {
+						printme += potentialLinks.get(potentialLink); // print score for that link
+						printme += " ";
+					    }
+					}
+					printme += "* ";
+					debugPrevStep = debugCurrentStep;
+				    }
+				    printme += " = "+score;
+				}
+				System.out.println(printme);
+				// Sanity check:
+				Set<CalorimeterHit> usedHits = new HashSet<CalorimeterHit>();
+				for (Cluster step : newPath.getSteps()) {
+				    for (CalorimeterHit hit : step.getCalorimeterHits()) {
+					if (usedHits.contains(hit)) { throw new AssertionError("Duplicated hit in step with "+step.getCalorimeterHits().size()+" hits"); }
+					usedHits.add(hit);
+				    }
+				}
+			    }
 			}
 		    }
 		}
@@ -1203,8 +1316,264 @@
 	} else {
 	    // not in same skeleton... check if low proximity...
 	    double distance = proximity(clus1, clus2);
-	    boolean close = (distance < 100.0); // FIXME: Shouldn't be hard-coded
+	    boolean close = (distance < 10.0); // FIXME: Shouldn't be hard-coded, and perhaps shouldn't be so tight.
 	    return close;
 	}
     }
+
+    void fillIn(Collection<Cluster> seeds, Map<Cluster,Double> seedClusterTrackEnergy, Map<Cluster,Cluster> mapPhotonClustersToSeeds, Map<Cluster,Cluster> mapSmallClustersToSeeds, Map<Cluster,Cluster> mapLargeClustersToSeeds, Map<Cluster,Cluster> mapMipsToSeeds, Map<Cluster,Cluster> mapClumpsToSeeds, Map<Cluster,Cluster> mapHitsToSeeds)
+    {
+	// Here are the unassigned clusters
+	List<Cluster> unassignedPhotonClusters = new Vector<Cluster>();
+	List<Cluster> unassignedSmallClusters = new Vector<Cluster>();
+	List<Cluster> unassignedLargeClusters = new Vector<Cluster>();
+	List<Cluster> unassignedMips = new Vector<Cluster>();
+	List<Cluster> unassignedClumps = new Vector<Cluster>();
+	List<Cluster> unassignedHits = new Vector<Cluster>();
+	for (Cluster clus : mapPhotonClustersToSeeds.keySet()) {
+	    if (mapPhotonClustersToSeeds.get(clus) == null) {
+		unassignedPhotonClusters.add(clus);
+	    }
+	}
+	for (Cluster clus : mapSmallClustersToSeeds.keySet()) {
+	    if (mapSmallClustersToSeeds.get(clus) == null) {
+		unassignedSmallClusters.add(clus);
+	    }
+	}
+	for (Cluster clus : mapLargeClustersToSeeds.keySet()) {
+	    if (mapLargeClustersToSeeds.get(clus) == null) {
+		unassignedLargeClusters.add(clus);
+	    }
+	}
+	for (Cluster clus : mapMipsToSeeds.keySet()) {
+	    if (mapMipsToSeeds.get(clus) == null) {
+		unassignedMips.add(clus);
+	    }
+	}
+	for (Cluster clus : mapClumpsToSeeds.keySet()) {
+	    if (mapClumpsToSeeds.get(clus) == null) {
+		unassignedClumps.add(clus);
+	    }
+	}
+	for (Cluster clus : mapHitsToSeeds.keySet()) {
+	    if (mapHitsToSeeds.get(clus) == null) {
+		unassignedHits.add(clus);
+	    }
+	}
+
+	// Here are the assigned clusters
+	Map<Cluster,BasicCluster> mapSeedToWholeCluster = new HashMap<Cluster,BasicCluster>();
+	for (Cluster seed : seeds) {
+	    List<Cluster> combinedClusters = new Vector<Cluster>();
+	    combinedClusters.addAll(reverseMap(seed, mapPhotonClustersToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapSmallClustersToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapLargeClustersToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapMipsToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapClumpsToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapHitsToSeeds));
+	    BasicCluster combined = makeCombinedCluster(combinedClusters);
+	    mapSeedToWholeCluster.put(seed, combined);
+	}
+	
+	// Now try and push the unassigned in. Start with the big things and
+	// work down to the little things so as not to pull in all and sundry.
+	double dotProductThreshold = 0.75;
+	fillIn(unassignedLargeClusters, seeds, mapSeedToWholeCluster, seedClusterTrackEnergy, mapLargeClustersToSeeds, dotProductThreshold);
+	fillIn(unassignedSmallClusters, seeds, mapSeedToWholeCluster, seedClusterTrackEnergy, mapSmallClustersToSeeds, dotProductThreshold);
+	fillIn(unassignedClumps, seeds, mapSeedToWholeCluster, seedClusterTrackEnergy, mapClumpsToSeeds, dotProductThreshold);
+	fillIn(unassignedHits, seeds, mapSeedToWholeCluster, seedClusterTrackEnergy, mapHitsToSeeds, dotProductThreshold);
+	fillIn(unassignedMips, seeds, mapSeedToWholeCluster, seedClusterTrackEnergy, mapMipsToSeeds, dotProductThreshold);
+    }
+
+    void fillIn(Collection<Cluster> unassignedClusters, Collection<Cluster> seeds, Map<Cluster,BasicCluster> mapSeedToWholeCluster, Map<Cluster,Double> seedClusterTrackEnergy, Map<Cluster,Cluster> mapClustersToSeeds, double dotProductThreshold) 
+    {
+	for (Cluster clus : unassignedClusters) {
+	    for (Cluster seed : seeds) {
+		double trackEnergy = seedClusterTrackEnergy.get(seed);
+		if (pushInCluster(clus, seed, mapSeedToWholeCluster, dotProductThreshold, trackEnergy)) {
+		    // Yes, add it. Mark it in the map:
+		    mapClustersToSeeds.put(clus, seed);
+		    // ... and also in our local copy so that we keep track of the running total of energy:
+		    mapSeedToWholeCluster.get(seed).addCluster(clus);
+		    break;
+		}
+	    }
+	}
+    }
+
+    boolean pushInCluster(Cluster unassignedCluster, Cluster seed, Map<Cluster,BasicCluster> mapSeedToWholeCluster, double dotProductThreshold, double trackEnergy) {
+	Hep3Vector seedPosition = new BasicHep3Vector(seed.getPosition());
+	Hep3Vector unassignedClusterPosition = new BasicHep3Vector(unassignedCluster.getPosition());
+	Hep3Vector displacementVector = VecOp.sub(unassignedClusterPosition, seedPosition);
+	double dotProduct = VecOp.dot(VecOp.unit(seedPosition), VecOp.unit(displacementVector));
+	if (dotProduct < dotProductThreshold) {
+	    // no good -- not in cone
+	    return false;
+	} else {
+	    // in cone -- but is energy ok?
+	    BasicCluster wholeClusterOfSeed = mapSeedToWholeCluster.get(seed);
+	    List<Cluster> merged = new Vector<Cluster>();
+	    merged.add(wholeClusterOfSeed);
+	    merged.add(unassignedCluster);
+	    double combinedEnergy = energy(merged);
+	    double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
+	    return (combinedEnergy - trackEnergy < 2.0*trackEnergySigma);
+	}
+    }
+
+    void writeOut(Collection<Cluster> seeds, Map<Cluster,Double> seedClusterTrackEnergy, Map<Cluster,Cluster> mapPhotonClustersToSeeds, Map<Cluster,Cluster> mapSmallClustersToSeeds, Map<Cluster,Cluster> mapLargeClustersToSeeds, Map<Cluster,Cluster> mapMipsToSeeds, Map<Cluster,Cluster> mapClumpsToSeeds, Map<Cluster,Cluster> mapHitsToSeeds, Map<Cluster,List<Track>> clustersMatchedToTracks)
+    {
+	List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+	
+	// Charged pions
+	int pdg_piplus = 211;
+	int pdg_piminus = -211; // PDG ID of the pi- is -211 (antiparticles carry negative IDs)
+	ParticleType type_piplus = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_piplus);
+	ParticleType type_piminus = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_piminus);
+	BaseParticleID pid_piplus = new BaseParticleID(type_piplus);
+	BaseParticleID pid_piminus = new BaseParticleID(type_piminus);
+	double mass_piplus = 0.1396;
+
+	// Here are the assigned clusters
+	Map<Cluster,BasicCluster> mapSeedToWholeCluster = new HashMap<Cluster,BasicCluster>();
+	for (Cluster seed : seeds) {
+	    List<Cluster> combinedClusters = new Vector<Cluster>();
+	    combinedClusters.addAll(reverseMap(seed, mapPhotonClustersToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapSmallClustersToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapLargeClustersToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapMipsToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapClumpsToSeeds));
+	    combinedClusters.addAll(reverseMap(seed, mapHitsToSeeds));
+	    BasicCluster combined = makeCombinedCluster(combinedClusters);
+	    BaseReconstructedParticle part = new BaseReconstructedParticle();
+	    List<Track> tracks = clustersMatchedToTracks.get(seed);
+	    if (tracks == null) { throw new AssertionError("Seed has null track list"); }
+	    if (tracks.size() == 0) { throw new AssertionError("Seed has empty track list"); }
+	    part.addCluster(combined);
+	    int charge = 0;
+	    Hep3Vector threeMomentum = new BasicHep3Vector(0,0,0);
+	    HepLorentzVector fourMomentum = new BasicHepLorentzVector(0.0, threeMomentum);
+	    for (Track tr : tracks) {
+		part.addTrack(tr);
+		charge += tr.getCharge();
+		Hep3Vector trackMomentum = new BasicHep3Vector(tr.getMomentum());
+		double trackMomentumMag = trackMomentum.magnitude();
+		double trackEnergySq = trackMomentumMag*trackMomentumMag + mass_piplus*mass_piplus;
+		threeMomentum = VecOp.add(threeMomentum, trackMomentum);
+		HepLorentzVector trackFourMomentum = new BasicHepLorentzVector(Math.sqrt(trackEnergySq), trackMomentum);
+		fourMomentum = VecOp.add(fourMomentum, trackFourMomentum);
+	    }
+	    part.set4Vector(fourMomentum);
+	    if (tracks.size() == 1) {
+		// Unique track => PID etc somewhat well-defined
+		part.setReferencePoint(new BasicHep3Vector(part.getTracks().get(0).getReferencePoint()));
+		part.setMass(mass_piplus);
+		if (charge == 1) {
+		    part.addParticleID(pid_piplus);
+		    part.setParticleIdUsed(pid_piplus);
+		} else if (charge == -1) {
+		    part.addParticleID(pid_piminus);
+		    part.setParticleIdUsed(pid_piminus);
+		} else {
+		    throw new AssertionError("Invalid charge: "+charge);
+		}
+	    } else {
+		// Multiple tracks => some quantities not well-defined
+		// Leave at defaults
+	    }
+	    outputParticleList.add(part);
+	}
+
+	// Here are the unassigned clusters
+	List<Cluster> unassignedPhotonClusters = new Vector<Cluster>();
+	List<Cluster> unassignedSmallClusters = new Vector<Cluster>();
+	List<Cluster> unassignedLargeClusters = new Vector<Cluster>();
+	List<Cluster> unassignedMips = new Vector<Cluster>();
+	List<Cluster> unassignedClumps = new Vector<Cluster>();
+	List<Cluster> unassignedHits = new Vector<Cluster>();
+	for (Cluster clus : mapPhotonClustersToSeeds.keySet()) {
+	    if (mapPhotonClustersToSeeds.get(clus) == null) {
+		unassignedPhotonClusters.add(clus);
+	    }
+	}
+	for (Cluster clus : mapSmallClustersToSeeds.keySet()) {
+	    if (mapSmallClustersToSeeds.get(clus) == null) {
+		unassignedSmallClusters.add(clus);
+	    }
+	}
+	for (Cluster clus : mapLargeClustersToSeeds.keySet()) {
+	    if (mapLargeClustersToSeeds.get(clus) == null) {
+		unassignedLargeClusters.add(clus);
+	    }
+	}
+	for (Cluster clus : mapMipsToSeeds.keySet()) {
+	    if (mapMipsToSeeds.get(clus) == null) {
+		unassignedMips.add(clus);
+	    }
+	}
+	for (Cluster clus : mapClumpsToSeeds.keySet()) {
+	    if (mapClumpsToSeeds.get(clus) == null) {
+		unassignedClumps.add(clus);
+	    }
+	}
+	for (Cluster clus : mapHitsToSeeds.keySet()) {
+	    if (mapHitsToSeeds.get(clus) == null) {
+		unassignedHits.add(clus);
+	    }
+	}
+
+	// Add in unassigned photon clusters as photons:
+	int pdg_photon = 22;
+	ParticleType type_photon = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_photon);
+	BaseParticleID pid_photon = new BaseParticleID(type_photon);
+	for (Cluster clus : unassignedPhotonClusters) {
+	    BaseReconstructedParticle part = new BaseReconstructedParticle();
+	    part.addCluster(clus);
+	    double clusterEnergy = energy(clus);
+	    Hep3Vector pos = new BasicHep3Vector(clus.getPosition());
+	    Hep3Vector threeMomentum = VecOp.mult(clusterEnergy, VecOp.unit(pos)); // assume it comes from the IP
+	    HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
+	    part.set4Vector(fourMomentum);
+	    part.setReferencePoint(0,0,0);
+	    part.setCharge(0);
+	    // Set the PID and mass
+	    part.addParticleID(pid_photon);
+	    part.setParticleIdUsed(pid_photon);
+	    part.setMass(0.0);
+	    // Add to the output list
+	    outputParticleList.add(part);
+	}
+
+	// Add in unassigned clumps & large clusters as neutral hadrons
+	int pdg_K0 = 130;
+	ParticleType type_K0 = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_K0);
+	BaseParticleID pid_K0 = new BaseParticleID(type_K0);
+	double mass_K0 = type_K0.getMass();
+
+	List<Cluster> largeNeutrals = new Vector<Cluster>();
+	largeNeutrals.addAll(unassignedClumps);
+	largeNeutrals.addAll(unassignedLargeClusters);
+	for (Cluster clus : largeNeutrals) {
+	    BaseReconstructedParticle part = new BaseReconstructedParticle();
+	    part.addCluster(clus);
+	    double clusterEnergy = energy(clus);
+	    double clusterMomentumMagSq = clusterEnergy*clusterEnergy - mass_K0*mass_K0;
+	    if (clusterMomentumMagSq<0.0) { clusterMomentumMagSq = 0.0; }
+	    Hep3Vector pos = new BasicHep3Vector(clus.getPosition());
+	    Hep3Vector threeMomentum = VecOp.mult(Math.sqrt(clusterMomentumMagSq), VecOp.unit(pos)); // assume it comes from the IP
+	    HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
+	    part.set4Vector(fourMomentum);
+	    part.setReferencePoint(0,0,0);
+	    part.setCharge(0);
+	    // Set the PID and mass
+	    part.addParticleID(pid_K0);
+	    part.setParticleIdUsed(pid_K0);
+	    part.setMass(mass_K0);
+	    // Add to the output list
+	    outputParticleList.add(part);
+	}
+
+	m_event.put(m_outputParticleListName, outputParticleList);
+    }
+
 }
CVSspam 0.2.8