Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDTreeDriver.java  +449 -117  (1.35 -> 1.36)
ReclusterDriver.java  +55 -11  (1.28 -> 1.29)
SteveMIPReassignmentAlgorithm.java  +4 -4  (1.1 -> 1.2)
Total: +508 -132
3 modified files
MJC: (contrib) In PFA, deal better with pathological cases for track-cluster linking (#346)

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.35 -> 1.36
diff -u -r1.35 -r1.36
--- ReclusterDTreeDriver.java	23 Jul 2008 17:41:06 -0000	1.35
+++ ReclusterDTreeDriver.java	4 Aug 2008 17:38:17 -0000	1.36
@@ -34,7 +34,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.35 2008/07/23 17:41:06 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.36 2008/08/04 17:38:17 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -81,6 +81,8 @@
     protected boolean m_fixSingleTracksWithCone = true;
     protected boolean m_fixJetsWithCone = true;
     protected boolean m_useSimpleConeForReassignment = true;
+    
+    protected boolean m_debugSeedSplitting = false;
 
     public void writeExtraEventOutput(boolean writeExtra) {
 	m_writeExtraEventOutput = writeExtra;
@@ -204,6 +206,10 @@
 	Map<Cluster, Cluster> treeOfClump = new HashMap<Cluster,Cluster>();
 	Map<Cluster, Cluster> treeOfLeftoverHits = new HashMap<Cluster,Cluster>();
 
+	// Identify the start point of showers
+	org.lcsim.recon.pfa.identifier.LocalHelixExtrapolator findCluster = new org.lcsim.recon.pfa.identifier.LocalHelixExtrapolator();
+	findCluster.process(m_event); // picks up geometry
+
 	// Match tracks
 	// ------------
 	Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
@@ -235,6 +241,89 @@
 	    }
 	    Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, allMatchableClusters);
 	    if (matchedCluster != null) {
+ 		if (leftoverHitClusters.contains(matchedCluster)) {
+		    if (m_debugSeedSplitting) {
+			// Debug printout
+			Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
+			if (interceptPoint != null) {
+			    double primaryDist = proximity(matchedCluster, interceptPoint);
+			    int innermostLayerOfMatchedCluster = 99;
+			    for (CalorimeterHit hit : matchedCluster.getCalorimeterHits()) {
+				int layer = getLayer(hit);
+				if (layer < innermostLayerOfMatchedCluster) { innermostLayerOfMatchedCluster = layer; }
+			    }
+			    System.out.println("DEBUG: Matched track with p="+momentum(tr).magnitude()+" to leftoverHitCluster with "+matchedCluster.getCalorimeterHits().size()+" hits at proximity="+primaryDist+" with innermost layer "+innermostLayerOfMatchedCluster);
+			}
+			Map<MCParticle, List<SimCalorimeterHit>> truthInfo = truthFromCluster(matchedCluster);
+			for (MCParticle part : truthInfo.keySet()) {
+			    List<SimCalorimeterHit> truthHits = truthInfo.get(part);
+			    System.out.println("DEBUG:     -> contribution of "+truthHits.size()+" hits from particle with p="+part.getMomentum().magnitude());
+			}
+		    }
+		    Cluster tree = treeOfSharedCluster.get(matchedCluster);
+		    if (tree != null) {
+			List<Cluster> targets = targetsInTree.get(tree);
+			if (m_debugSeedSplitting) {
+			    // Debug printout
+			    for (Cluster target : targets) {
+				String idname = new String("UNKNOWN");
+				if (mipsOld.contains(target)) {
+				    idname = new String("MipOld");
+				} else if (mipsNew.contains(target)) {
+				    idname = new String("MipNew");
+				} else if (clumps.contains(target)) {
+				    idname = new String("Clump");
+				}
+				MCParticle targetTruth = quoteDominantParticle(target);
+				double targetTruthMom = targetTruth.getMomentum().magnitude();
+				System.out.println("DEBUG:     -> structure found: "+idname+" with "+target.getCalorimeterHits().size()+" hits from particle with p="+targetTruthMom);
+			    }
+			}
+			if (targets.size()>0) {
+			    Cluster rematchedSubCluster = m_trackClusterMatcher.matchTrackToCluster(tr, targets);
+			    if (rematchedSubCluster != null) {
+				matchedCluster = rematchedSubCluster; // CHANGES BEHAVIOUR!
+				if (m_debugSeedSplitting) {
+				    // Debug printout
+				    String idname = new String("UNKNOWN");
+				    if (mipsOld.contains(rematchedSubCluster)) {
+					idname = new String("MipOld");
+				    } else if (mipsNew.contains(rematchedSubCluster)) {
+					idname = new String("MipNew");
+				    } else if (clumps.contains(rematchedSubCluster)) {
+					idname = new String("Clump");
+				    }
+				    MCParticle targetTruth = quoteDominantParticle(rematchedSubCluster);
+				    double targetTruthMom = targetTruth.getMomentum().magnitude();
+				    if (rematchedSubCluster != null) {
+					System.out.println("DEBUG:     -> Rematch to: "+idname+" with "+rematchedSubCluster.getCalorimeterHits().size()+" hits from particle with p="+targetTruthMom);
+				    }
+				}
+			    } else {
+				if (m_debugSeedSplitting) {
+				    // Debug printout
+				    System.out.println("DEBUG:     -> Rematch failed (no targets passed track-matching)");
+				    Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
+				    if (interceptPoint != null) {
+					for (Cluster target : targets) {
+					    double dist = proximity(target, interceptPoint);
+					    int innermostLayerOfTarget = 99;
+					    for (CalorimeterHit hit : target.getCalorimeterHits()) {
+						int layer = getLayer(hit);
+						if (layer < innermostLayerOfTarget) { innermostLayerOfTarget = layer; }
+					    }
+					    System.out.println("DEBUG:           * Target with "+target.getCalorimeterHits().size()+" hits at dist="+dist+" with innermost layer="+innermostLayerOfTarget);
+					}
+				    }
+				}
+			    }
+			}
+		    }
+ 		} else if (treesWithNoStructure.contains(matchedCluster)) {
+		    if (m_debugSeedSplitting) {
+			System.out.println("DEBUG: Matched track with p="+momentum(tr).magnitude()+" to block with "+matchedCluster.getCalorimeterHits().size()+" hits.");
+		    }
+ 		}
 		tracksMatchedToClusters.put(tr, matchedCluster);
 		List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
 		if (clusTrList == null) { 
@@ -281,46 +370,184 @@
 	    }
 	}
 
+	ShowerPointFinder showerFinder = new ShowerPointFinder(findCluster, allHits, tracksMatchedToClusters);
+	Map<Track,BasicCluster> MapTrkToMIP = showerFinder.findMips(); 
+	event.put("ShowerFinderMapTrackToMip", MapTrkToMIP);
+	List<Cluster> preShowerMips = new Vector<Cluster>();
+	preShowerMips.addAll(MapTrkToMIP.values());
+	event.put("ShowerFinderMips", preShowerMips);
+
 	// Figure out whether tracks were uniquely matched or not:
 	Set<Track> uniquelyMatchedTracks = new HashSet<Track>();
 	Set<Track> ambiguouslyMatchedTracks = new HashSet<Track>();
-	List<Set<Track>> ambiguouslyMatchedTrackGroups = new Vector<Set<Track>>();
-	for (Cluster clus : clustersMatchedToTracks.keySet()) {
-	    List<Track> tracksMatchedToThisCluster = clustersMatchedToTracks.get(clus);
-	    if (tracksMatchedToThisCluster.size() > 1) {
-		ambiguouslyMatchedTracks.addAll(tracksMatchedToThisCluster);
-		Set<Track> newSet = new HashSet<Track>();
-		newSet.addAll(tracksMatchedToThisCluster);
-		ambiguouslyMatchedTrackGroups.add(newSet);
-	    } else {
-		uniquelyMatchedTracks.addAll(tracksMatchedToThisCluster);
-	    }
-	}
-	if (uniquelyMatchedTracks.size() + ambiguouslyMatchedTracks.size() != tracksMatchedToClusters.keySet().size()) { throw new AssertionError("Book-keeping error: "+uniquelyMatchedTracks.size()+" + "+ambiguouslyMatchedTracks.size()+" != "+tracksMatchedToClusters.keySet().size()); }
-	// Redo ambiguous tracks hackishly:
-	// Similarly, redo track -> cluster map to use tweaked tracks
 	Map<Track, Cluster> tweakedTracksMatchedToClusters = new HashMap<Track,Cluster>();
 	Map<Cluster, Track> clustersMatchedToTweakedTracks = new HashMap<Cluster, Track>();
 	List<Track> tweakedTracks = new Vector<Track>();
 	Map<Track, Track> mapOrigTrackToTweakedTrack = new HashMap<Track,Track>();
-	tweakedTracks.addAll(uniquelyMatchedTracks);
-	for (Track tr : uniquelyMatchedTracks) {
-	    mapOrigTrackToTweakedTrack.put(tr,tr);
-	}
-	for (Set<Track> trackSet : ambiguouslyMatchedTrackGroups) {
-	    MultipleTrackTrack tweakedTrack = new MultipleTrackTrack(trackSet);
-	    tweakedTracks.add(tweakedTrack);
-	    for (Track origTrack : trackSet) {
-		Cluster clus = tracksMatchedToClusters.get(origTrack);
-		tweakedTracksMatchedToClusters.put(tweakedTrack, clus); // over-writes sometimes, but that's OK.
-		clustersMatchedToTweakedTracks.put(clus, tweakedTrack);
-		mapOrigTrackToTweakedTrack.put(origTrack, tweakedTrack);
+	handleTrackMatchingAmbiguities(clustersMatchedToTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracksMatchedToClusters, clustersMatchedToTweakedTracks, tweakedTracks, mapOrigTrackToTweakedTrack);
+
+	// Check for cases where seed cluster is just plain too big
+	List<Track> flaggedBadTracks = new Vector<Track>();
+	for (Track tr : tweakedTracks) {
+	    if (electronTracks.contains(tr)) {
+		continue; // Electron clusters are not too big by construction (E/p ~ 1)
+	    }
+	    Cluster seed = tweakedTracksMatchedToClusters.get(tr);
+	    double seedEnergy = energy(seed);
+	    double tolerance = 3.0;
+	    boolean testValidEoverP = testEoverP_oneSided(seedEnergy, tr, tolerance);
+	    if (!testValidEoverP) {
+		// More than 3sigma off -- broken.
+		flaggedBadTracks.add(tr);
 	    }
 	}
-	for (Track tr : uniquelyMatchedTracks) {
-	    Cluster clus = tracksMatchedToClusters.get(tr);
-	    tweakedTracksMatchedToClusters.put(tr, clus);
-	    clustersMatchedToTweakedTracks.put(clus, tr);
+	for (Track tr : flaggedBadTracks) {
+	    // Track's seed has E>>p -- split it up.
+	    // First find the seed using the parent track...
+	    Cluster seed = tweakedTracksMatchedToClusters.get(tr);
+
+	    // We'll be redoing the matching, so if we have several tracks
+	    // bundled into one, we should split them up:
+	    List<Track> individualTracks = new Vector<Track>();
+	    if (tr instanceof MultipleTrackTrack) {
+		individualTracks.addAll(tr.getTracks());
+	    } else {
+		individualTracks.add(tr);
+	    }
+
+	    // Split the seed up
+	    Map<Track,Cluster> splitOutputMap = new HashMap<Track,Cluster>();
+	    List<Cluster> splitClusters = splitPhoton(seed, individualTracks, splitOutputMap, 10.0, false);
+	    if (m_debugSeedSplitting) {
+		System.out.println("DEBUG: Split big cluster with "+seed.getCalorimeterHits().size()+" hits into pieces...");
+		for (Track tmpTr : splitOutputMap.keySet()) {
+		    double p = (new BasicHep3Vector(tmpTr.getMomentum())).magnitude();
+		    Cluster clus = splitOutputMap.get(tmpTr);
+		    System.out.println(" * Track with p="+p+" mapped to piece with "+clus.getCalorimeterHits().size()+" hits.");
+		}
+	    }
+
+	    // Remove old listings / book-keeping info
+	    mipsOld.remove(seed);
+	    mipsNew.remove(seed);
+	    mips.remove(seed);
+	    clumps.remove(seed);
+	    leftoverHitClusters.remove(seed);
+	    treesWithNoStructure.remove(seed);
+	    clustersMatchedToTracks.remove(seed);
+	    tweakedTracksMatchedToClusters.remove(tr);
+	    clustersMatchedToTweakedTracks.remove(seed);
+	    tweakedTracks.remove(tr);
+	    for (Track dauTr : individualTracks) { 
+		tracksMatchedToClusters.remove(dauTr); 
+		uniquelyMatchedTracks.remove(dauTr);
+		ambiguouslyMatchedTracks.remove(dauTr);
+		mapOrigTrackToTweakedTrack.remove(dauTr);
+	    }
+	    List<Cluster> treesContainingOldSeed = new Vector<Cluster>();
+	    for (Cluster tree : targetsInTree.keySet()) {
+		List<Cluster> targets = targetsInTree.get(tree);
+		if (targets.contains(seed)) {
+		    treesContainingOldSeed.add(tree);
+		    targets.remove(seed);
+		}
+	    }
+
+	    // Some tracks in the bundle may not have a seed cluster any more. They will be jettisoned.
+	    // This is rather brutal -- really we should refine the matching somehow (iteratively?)
+	    // For the rest, see if they're now ambiguous or unique in the matching to newly broken-up seeds
+	    Map<Cluster, List<Track>> local_clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
+	    Map<Track,Cluster> local_tracksMatchedToClusters = new HashMap<Track,Cluster>();
+	    Set<Track> local_uniquelyMatchedTracks = new HashSet<Track>();
+	    Set<Track> local_ambiguouslyMatchedTracks = new HashSet<Track>();
+	    Map<Track, Cluster> local_tweakedTracksMatchedToClusters = new HashMap<Track, Cluster>();
+	    Map<Cluster, Track> local_clustersMatchedToTweakedTracks = new HashMap<Cluster, Track>();
+	    List<Track> local_tweakedTracks = new Vector<Track>();
+	    Map<Track, Track> local_mapOrigTrackToTweakedTrack = new HashMap<Track, Track>();
+	    for (Track dauTr : individualTracks) {
+		Cluster newSeed = splitOutputMap.get(dauTr);
+		if (newSeed != null) {
+		    List<Track> tracksMatchedToSeed = local_clustersMatchedToTracks.get(newSeed);
+		    if (tracksMatchedToSeed == null) { tracksMatchedToSeed = new Vector<Track>(); local_clustersMatchedToTracks.put(newSeed, tracksMatchedToSeed); }
+		    tracksMatchedToSeed.add(dauTr);
+		    local_tracksMatchedToClusters.put(dauTr, newSeed);
+		}
+	    }
+	    handleTrackMatchingAmbiguities(local_clustersMatchedToTracks, local_tracksMatchedToClusters, local_uniquelyMatchedTracks, local_ambiguouslyMatchedTracks, local_tweakedTracksMatchedToClusters, local_clustersMatchedToTweakedTracks, local_tweakedTracks, local_mapOrigTrackToTweakedTrack);
+
+	    // Book-keeping
+	    Set<Cluster> newSeeds = local_clustersMatchedToTracks.keySet();
+	    Set<Cluster> splitOutputMapSeeds = new HashSet<Cluster>();
+	    splitOutputMapSeeds.addAll(splitOutputMap.values());
+
+	    // Any piece not matched to a track becomes a photon
+	    for (Cluster piece : splitClusters) {
+		if (!newSeeds.contains(piece)) {
+		    chargedHadronLikePhotons.add(piece);
+		    modifiedPhotonClusters.add(piece);
+		}
+	    }
+
+	    // Any track not matched to anything is junked
+	    for (Track subTr : individualTracks) {
+		if (local_tracksMatchedToClusters.get(subTr) == null) {
+		    if (m_debugSeedSplitting) { System.out.println("DEBUG: After splitting cluster, couldn't find a match for track -- junking"); }
+		    unmatchedTracks.add(subTr);
+		}
+	    }
+
+	    // Now, for each track matched to a seed, re-test E/p. If it passes, keep it.
+	    for (Track newTweakedTrack : local_tweakedTracks) {
+		Cluster newSeed = local_tweakedTracksMatchedToClusters.get(newTweakedTrack);
+		if (newSeed == null) { throw new AssertionError("Seed is null, but it shouldn't be -- book-keeping error"); }
+		if (!newSeeds.contains(newSeed)) { throw new AssertionError("New seed is not in newSeeds!"); }
+		if (unmatchedTracks.contains(newTweakedTrack)) { throw new AssertionError("Book-keeping error!"); }
+		double newSeedEnergy = energy(newSeed);
+		double tolerance = 3.0;
+		boolean testValidEoverP_new = testEoverP_oneSided(newSeedEnergy, newTweakedTrack, tolerance);
+
+		if (testValidEoverP_new) {
+		    // REPLACE -- we managed to break off a piece of acceptable size.
+		    if (m_debugSeedSplitting) { System.out.println("DEBUG: Will replace old seed of "+seed.getCalorimeterHits().size()+" hits with new seed of "+newSeed.getCalorimeterHits().size()+" hits plus photon pieces"); }
+		    // Check if this is one track or several bundled together:
+		    List<Track> individualTracksOfNewTweakedTrack = new Vector<Track>();
+		    if (newTweakedTrack instanceof MultipleTrackTrack) {
+			individualTracksOfNewTweakedTrack.addAll(newTweakedTrack.getTracks());
+			ambiguouslyMatchedTracks.addAll(newTweakedTrack.getTracks());
+			for (Track subTr : newTweakedTrack.getTracks()) {
+			    mapOrigTrackToTweakedTrack.put(subTr, newTweakedTrack);
+			}
+		    } else {
+			uniquelyMatchedTracks.add(newTweakedTrack);
+			individualTracksOfNewTweakedTrack.add(newTweakedTrack);
+			mapOrigTrackToTweakedTrack.put(newTweakedTrack, newTweakedTrack);
+		    }
+		    // Add new listings
+		    clumps.add(newSeed);
+		    clustersMatchedToTracks.put(newSeed, individualTracksOfNewTweakedTrack);
+		    for (Track subTr : individualTracksOfNewTweakedTrack) {
+			tracksMatchedToClusters.put(subTr, newSeed);
+		    }
+		    tweakedTracks.add(newTweakedTrack);
+		    tweakedTracksMatchedToClusters.put(newTweakedTrack, newSeed);
+		    clustersMatchedToTweakedTracks.put(newSeed, newTweakedTrack);
+		    for (Cluster tree : treesContainingOldSeed) {
+			List<Cluster> targets = targetsInTree.get(tree);
+			targets.add(newSeed);
+		    }
+		} else {
+		    // FAIL ENTIRELY
+		    if (m_debugSeedSplitting) {
+			System.out.println("DEBUG: Unable to replace old seed of "+seed.getCalorimeterHits().size()+" hits with smaller seed. Will turn into photon and remove track from consideration.");
+			System.out.println("DEBUG: Was working with "+local_tweakedTracks.size()+" local tweaked tracks from "+individualTracks.size()+" individual tracks");
+			if (clustersMatchedToTracks.get(seed) != null) { System.out.println("OLD SEED MATCHED TO NON-NULL TRACK"); }
+		    }
+		    // Add new listings as a photon
+		    chargedHadronLikePhotons.add(newSeed);
+		    modifiedPhotonClusters.add(newSeed);
+		    unmatchedTracks.add(newTweakedTrack);
+		}
+	    }
 	}
 
 	// Track seeds
@@ -585,7 +812,13 @@
 	    sharedLeftoverHitClustersHCAL.rebuildHints();
 	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
 	} else if (steveMipShareForHaloHCAL) {
-	    SteveMIPReassignmentAlgorithm tmpMipAlg = new SteveMIPReassignmentAlgorithm(event, 1.0);
+	    String mapName;
+	    if (m_useSteveMipsForConeScoring) {
+		mapName = "TrackMipMap";
+	    } else {
+		mapName = "ShowerFinderMapTrackToMip";
+	    }
+	    SteveMIPReassignmentAlgorithm tmpMipAlg = new SteveMIPReassignmentAlgorithm(event, 1.0, mapName);
 	    DownstreamTrackClusterSharingAlgorithm coneSharingAlgHCAL  = new DownstreamTrackClusterSharingAlgorithm(clustersMatchedToTweakedTracks, tmpMipAlg); // TEST
 	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneSharingAlgHCAL); // TEST
 	    List<Cluster> tmpSeedList = new Vector<Cluster>(); // TEST
@@ -594,8 +827,6 @@
 	    sharedLeftoverHitClustersHCAL.rebuildHints();
 	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
 	} else if (taejeongMipShareForHaloHCAL) {
-	    LocalHelixExtrapolator tmpFindCluster = new LocalHelixExtrapolator(); // TEST    
-	    tmpFindCluster.process(m_event); // TEST
 	    Map<Track, Set<Cluster>> tmpMap = new HashMap<Track, Set<Cluster>>(); // TEST
 	    for (Cluster clus : clustersMatchedToTweakedTracks.keySet()) {
 		Track tr = clustersMatchedToTweakedTracks.get(clus);
@@ -606,7 +837,6 @@
 	    }
 	    System.out.println("DEBUG: [seeds] contains "+seeds.size()+" clusters.");
 	    System.out.println("DEBUG: [clustersMatchedToTweakedTracks] contains "+clustersMatchedToTweakedTracks.keySet().size()+" clusters.");
-	    //CachedMIPReassignmentAlgorithm tmpMipAlg = new CachedMIPReassignmentAlgorithm(1.0, tmpFindCluster, tmpMap, mips, allHits); // TEST
 	    MIPReassignmentAlgorithm tmpMipAlg = null; // FIXME: Use actual constructor
 	    DownstreamTrackClusterSharingAlgorithm coneSharingAlgHCAL  = new DownstreamTrackClusterSharingAlgorithm(clustersMatchedToTweakedTracks, tmpMipAlg); // TEST
 	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneSharingAlgHCAL); // TEST
@@ -763,20 +993,11 @@
 	    }
 	}
 
-	LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
-	findCluster.process(m_event); // picks up geometry
-	ShowerPointFinder showerFinder = new ShowerPointFinder(findCluster, allHits, tracksMatchedToClusters);
-	Map<Track,BasicCluster> MapTrkToMIP = showerFinder.findMips(); 
-	event.put("ShowerFinderMapTrackToMip", MapTrkToMIP);
-	List<Cluster> preShowerMips = new Vector<Cluster>();
-	preShowerMips.addAll(MapTrkToMIP.values());
-	event.put("ShowerFinderMips", preShowerMips);
-
 	ReassignClustersAlgorithm algorithm = null;
 	if (m_useSimpleConeForReassignment) {
 	    algorithm = new ConeReassignmentAlgorithm(1.0, findCluster);
 	} else {
-	    algorithm = new MIPReassignmentAlgorithm(0.79, findCluster, MapTrkToMIP);
+	    algorithm = new MIPReassignmentAlgorithm(1.05, findCluster, MapTrkToMIP, 7);
 	}
 	if (m_fixSingleTracksWithCone) {
 	    // First, try to fix the simplest case: single tracks with E/p < 1
@@ -817,6 +1038,9 @@
 	m_event = null;
     }
 
+    Vector<Double> m_tmp_calib = new Vector<Double>();
+    Vector<Double> m_tmp_calibErr = new Vector<Double>();
+
     boolean checkIfReassignmentNeeded(Set<Track> jet, Set<Cluster> showerComponents, List<SharedClusterGroup> allSharedClusters, double tolerance)
     {
 	double jetMomentum = jetScalarMomentum(jet);
@@ -1327,7 +1551,82 @@
 	}
     }
 
+    private void findTrackSeedInFirstLayersOnly(Track tr, Map<Integer, Set<CalorimeterHit>> unusedHitsByLayer, List<CalorimeterHit> trackSeedHits, Map<CalorimeterHit, Hep3Vector> mapTrackSeedToTangent, Map<Track, CalorimeterHit> mapTrackToTrackSeed, double cutTrackSeedDist) {
+	LocalHelixExtrapolationTrackClusterMatcher debugTrackMatch = new LocalHelixExtrapolationTrackClusterMatcher();
+	debugTrackMatch.process(m_event);
+	List<Cluster> tmpListLayer0 = new Vector<Cluster>();
+	List<Cluster> tmpListLayer1 = new Vector<Cluster>();
+	if (unusedHitsByLayer.get(0) != null) {
+	    for (CalorimeterHit hit : unusedHitsByLayer.get(0)) {
+		if (!trackSeedHits.contains(hit)) {
+		    BasicCluster tmpClus = new BasicCluster();
+		    tmpClus.addHit(hit);
+		    tmpListLayer0.add(tmpClus);
+		}
+	    }
+	}
+	if (unusedHitsByLayer.get(1) != null) {
+	    for (CalorimeterHit hit : unusedHitsByLayer.get(1)) {
+		if (!trackSeedHits.contains(hit)) {
+		    BasicCluster tmpClus = new BasicCluster();
+		    tmpClus.addHit(hit);
+		    tmpListLayer1.add(tmpClus);
+		}
+	    }
+	}
+	Cluster seedClusToUse = null;
+	Cluster tmpMatchedClusterLayer0 = debugTrackMatch.matchTrackToCluster(tr, tmpListLayer0);
+	Hep3Vector interceptPointLayer0 = debugTrackMatch.getExtrapolator().extendToECALLayer(0);
+	Cluster tmpMatchedClusterLayer1 = debugTrackMatch.matchTrackToCluster(tr, tmpListLayer1);
+	Hep3Vector interceptPointLayer1 = debugTrackMatch.getExtrapolator().extendToECALLayer(1);
+	if (tmpMatchedClusterLayer0 == null && tmpMatchedClusterLayer1 == null) {
+	    // No seed found for this track -- maybe extrapolation is too imprecise.
+	    // Handle it the usual way.
+	} else if (tmpMatchedClusterLayer0 == null && tmpMatchedClusterLayer1 != null) {
+	    seedClusToUse = tmpMatchedClusterLayer1;
+	} else if (tmpMatchedClusterLayer0 != null && tmpMatchedClusterLayer1 == null) {
+	    seedClusToUse = tmpMatchedClusterLayer0;
+	} else {
+	    // Hits found in both layers -- need to pick one.
+	    // Look at distance from track intercept point IN LAYER 0 to hit.
+	    // This favours the hits in the innermost layer... which is what we
+	    // want in general.
+	    Hep3Vector positionOfMatchedClusterLayer0 = new BasicHep3Vector(tmpMatchedClusterLayer0.getCalorimeterHits().iterator().next().getPosition());
+	    Hep3Vector positionOfMatchedClusterLayer1 = new BasicHep3Vector(tmpMatchedClusterLayer1.getCalorimeterHits().iterator().next().getPosition());
+	    double distFromFaceToHit0 = VecOp.sub(positionOfMatchedClusterLayer0, interceptPointLayer0).magnitude();
+	    double distFromFaceToHit1 = VecOp.sub(positionOfMatchedClusterLayer1, interceptPointLayer0).magnitude();
+	    if (distFromFaceToHit0 > distFromFaceToHit1) {
+		seedClusToUse = tmpMatchedClusterLayer1;
+	    } else {
+		seedClusToUse = tmpMatchedClusterLayer0;
+	    }
+	}
+	CalorimeterHit seedHitToUse = null;
+	if (seedClusToUse != null) {
+	    seedHitToUse = seedClusToUse.getCalorimeterHits().iterator().next();
+	    // Require within a certain distance (sanity check)
+	    Hep3Vector positionOfSeedHit = new BasicHep3Vector(seedHitToUse.getPosition());
+	    Hep3Vector trackExtrapPointInLayer = debugTrackMatch.getExtrapolator().extendToECALLayer(getLayer(seedHitToUse));
+	    // It's possible (but rare) for trackExtrapPointInLayer to be null -- e.g. if track just clipped the calorimeter
+	    // and never entered layer 2. Watch for that case.
+	    if (trackExtrapPointInLayer != null) {
+		double distForCut = VecOp.sub(positionOfSeedHit, trackExtrapPointInLayer).magnitude();
+		if (distForCut < cutTrackSeedDist) {
+		    // Within cutTrackSeedDist (10 mm = 1 cm by default)
+		    trackSeedHits.add(seedHitToUse);
+		    Hep3Vector tangent = VecOp.unit(VecOp.sub(interceptPointLayer1, interceptPointLayer0));
+		    mapTrackSeedToTangent.put(seedHitToUse, tangent);
+		    mapTrackToTrackSeed.put(tr, seedHitToUse);
+		}
+	    }
+	}
+    }
+
     protected List<Cluster> splitPhoton(Cluster photon, List<Track> tracksOfThisPhoton, Map<Track,Cluster> mapTrackToCluster) {
+	return splitPhoton(photon, tracksOfThisPhoton, mapTrackToCluster, 10.0, true);
+    }
+
+    protected List<Cluster> splitPhoton(Cluster photon, List<Track> tracksOfThisPhoton, Map<Track,Cluster> mapTrackToCluster, double cutTrackSeedDist, boolean trackSeedsInFirstLayersOnly) {
 	// Output
 	List<Cluster> output = new Vector<Cluster>();
 	if (mapTrackToCluster == null || mapTrackToCluster.size() != 0) { throw new AssertionError("Must supply initialized, blank map"); }
@@ -1353,77 +1652,42 @@
 	Map<CalorimeterHit, Hep3Vector> mapTrackSeedToTangent = new HashMap<CalorimeterHit, Hep3Vector>();
 	Map<Track, CalorimeterHit> mapTrackToTrackSeed = new HashMap<Track, CalorimeterHit>();
 	for (Track tr : tracksOfThisPhoton) {
-	    LocalHelixExtrapolationTrackClusterMatcher debugTrackMatch = new LocalHelixExtrapolationTrackClusterMatcher();
-	    debugTrackMatch.process(m_event);
-	    List<Cluster> tmpListLayer0 = new Vector<Cluster>();
-	    List<Cluster> tmpListLayer1 = new Vector<Cluster>();
-	    if (unusedHitsByLayer.get(0) != null) {
-		for (CalorimeterHit hit : unusedHitsByLayer.get(0)) {
-		    if (!trackSeedHits.contains(hit)) {
-			BasicCluster tmpClus = new BasicCluster();
-			tmpClus.addHit(hit);
-			tmpListLayer0.add(tmpClus);
-		    }
-		}
-	    }
-	    if (unusedHitsByLayer.get(1) != null) {
-		for (CalorimeterHit hit : unusedHitsByLayer.get(1)) {
-		    if (!trackSeedHits.contains(hit)) {
-			BasicCluster tmpClus = new BasicCluster();
-			tmpClus.addHit(hit);
-			tmpListLayer1.add(tmpClus);
-		    }
-		}
-	    }
-	    Cluster seedClusToUse = null;
-	    Cluster tmpMatchedClusterLayer0 = debugTrackMatch.matchTrackToCluster(tr, tmpListLayer0);
-	    Hep3Vector interceptPointLayer0 = debugTrackMatch.getExtrapolator().extendToECALLayer(0);
-	    Cluster tmpMatchedClusterLayer1 = debugTrackMatch.matchTrackToCluster(tr, tmpListLayer1);
-	    Hep3Vector interceptPointLayer1 = debugTrackMatch.getExtrapolator().extendToECALLayer(1);
-	    if (tmpMatchedClusterLayer0 == null && tmpMatchedClusterLayer1 == null) {
-		// No seed found for this track -- maybe extrapolation is too imprecise.
-		// Handle it the usual way.
-	    } else if (tmpMatchedClusterLayer0 == null && tmpMatchedClusterLayer1 != null) {
-		seedClusToUse = tmpMatchedClusterLayer1;
-	    } else if (tmpMatchedClusterLayer0 != null && tmpMatchedClusterLayer1 == null) {
-		seedClusToUse = tmpMatchedClusterLayer0;
+	    if (trackSeedsInFirstLayersOnly) {
+		findTrackSeedInFirstLayersOnly(tr, unusedHitsByLayer, trackSeedHits, mapTrackSeedToTangent, mapTrackToTrackSeed, cutTrackSeedDist);
 	    } else {
-		// Hits found in both layers -- need to pick one.
-		// Look at distance from track intercept point IN LAYER 0 to hit.
-		// This favours the hits in the innermost layer... which is what we
-		// want in general.
-		Hep3Vector positionOfMatchedClusterLayer0 = new BasicHep3Vector(tmpMatchedClusterLayer0.getCalorimeterHits().iterator().next().getPosition());
-		Hep3Vector positionOfMatchedClusterLayer1 = new BasicHep3Vector(tmpMatchedClusterLayer1.getCalorimeterHits().iterator().next().getPosition());
-		double distFromFaceToHit0 = VecOp.sub(positionOfMatchedClusterLayer0, interceptPointLayer0).magnitude();
-		double distFromFaceToHit1 = VecOp.sub(positionOfMatchedClusterLayer1, interceptPointLayer0).magnitude();
-		if (distFromFaceToHit0 > distFromFaceToHit1) {
-		    seedClusToUse = tmpMatchedClusterLayer1;
-		} else {
-		    seedClusToUse = tmpMatchedClusterLayer0;
-		}
-	    }
-	    CalorimeterHit seedHitToUse = null;
-	    if (seedClusToUse != null) {
-		seedHitToUse = seedClusToUse.getCalorimeterHits().iterator().next();
-		// Require within a certain distance (sanity check)
-		Hep3Vector positionOfSeedHit = new BasicHep3Vector(seedHitToUse.getPosition());
-		Hep3Vector trackExtrapPointInLayer = debugTrackMatch.getExtrapolator().extendToECALLayer(getLayer(seedHitToUse));
-		// It's possible (but rare) for trackExtrapPointInLayer to be null -- e.g. if track just clipped the calorimeter
-		// and never entered layer 2. Watch for that case.
-		if (trackExtrapPointInLayer != null) {
-		    double distForCut = VecOp.sub(positionOfSeedHit, trackExtrapPointInLayer).magnitude();
-		    if (distForCut < 10.0) {
-		    	// Within 1cm
-		    	trackSeedHits.add(seedHitToUse);
-		    	Hep3Vector tangent = VecOp.unit(VecOp.sub(interceptPointLayer1, interceptPointLayer0));
-		    	mapTrackSeedToTangent.put(seedHitToUse, tangent);
-		    	mapTrackToTrackSeed.put(tr, seedHitToUse);
+		LocalHelixExtrapolationTrackClusterMatcher debugTrackMatch = new LocalHelixExtrapolationTrackClusterMatcher();
+		debugTrackMatch.process(m_event);
+		for (int iLayer=minLayer; iLayer<maxLayer; iLayer++) {
+		    Set<CalorimeterHit> unusedHitsInLayer = unusedHitsByLayer.get(iLayer);
+		    List<Cluster> tmpClusterList = new Vector<Cluster>();
+		    for (CalorimeterHit hit : unusedHitsInLayer) {
+			if (!trackSeedHits.contains(hit)) {
+			    BasicCluster tmpClus = new BasicCluster();
+			    tmpClus.addHit(hit);
+			    tmpClusterList.add(tmpClus);
+			}
+		    }
+		    Cluster bestClusterMatchInLayer = debugTrackMatch.matchTrackToCluster(tr, tmpClusterList);
+		    Hep3Vector interceptPointInLayer = debugTrackMatch.getExtrapolator().extendToECALLayer(iLayer);
+		    if (bestClusterMatchInLayer != null) {
+			CalorimeterHit seedHit = bestClusterMatchInLayer.getCalorimeterHits().get(0);
+			Hep3Vector positionOfMatchedHit = new BasicHep3Vector(seedHit.getPosition());
+			double transverseDistance = VecOp.sub(positionOfMatchedHit, interceptPointInLayer).magnitude();
+			if (transverseDistance < cutTrackSeedDist) {
+			    // Within cutTrackSeedDist (1 cm by default) => OK
+			    trackSeedHits.add(seedHit);
+			    Hep3Vector tangent = debugTrackMatch.getExtrapolator().getTangent(interceptPointInLayer, tr);
+			    mapTrackSeedToTangent.put(seedHit, tangent);
+			    mapTrackToTrackSeed.put(tr, seedHit);
+			    break; // stop looping over layers
+			}
 		    }
 		}
 	    }
 	}
 
 	// Start building from track seeds:
+	double dotProductCutForTracks = 0.7; // FIXME: Use 0.85?
 	for (Track tr : tracksOfThisPhoton) {
 	    if (m_photonSplitDebug) { System.out.println("DEBUG: Attempting to build cluster for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()); }
 	    CalorimeterHit seed = mapTrackToTrackSeed.get(tr);
@@ -1431,15 +1695,20 @@
 		if (unusedHitsByLayer.get(getLayer(seed)).contains(seed)) {
 		    // Not yet used => build a cluster from it
 		    Hep3Vector tangent = mapTrackSeedToTangent.get(seed);
-		    Cluster protoCluster = buildProtoCluster(seed, unusedHitsByLayer, tangent);
+		    Cluster protoCluster = buildProtoCluster(seed, unusedHitsByLayer, tangent, dotProductCutForTracks);
 		    output.add(protoCluster);
 		    mapTrackToCluster.put(tr, protoCluster);
 		    if (m_photonSplitDebug) { debugPrintProtoCluster(protoCluster, photon); }
+		} else {
+		    if (m_photonSplitDebug) { System.out.println("DEBUG: Unable to build cluster for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" because seed was not unused"); }
 		}
+	    } else {
+		if (m_photonSplitDebug) { System.out.println("DEBUG: Unable to build cluster for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" because seed was null"); }
 	    }
 	}
 
 	// Start building from non-track seeds:
+	double dotProductCutForNonTracks = 0.7;
 	if (m_photonSplitDebug) { System.out.println("DEBUG: Attempting to build clusters from non-track seeds:"); }
 	for (int iLayer=minLayer; iLayer<=maxLayer; iLayer++) {
 	    Set<CalorimeterHit> unusedHitsInCurrentLayer = unusedHitsByLayer.get(iLayer);
@@ -1447,7 +1716,7 @@
 		CalorimeterHit seed = unusedHitsInCurrentLayer.iterator().next();
 		Hep3Vector seedPosition = new BasicHep3Vector(seed.getPosition());
 		Hep3Vector tangentAtSeed = VecOp.unit(seedPosition);
-		Cluster protoCluster = buildProtoCluster(seed, unusedHitsByLayer, tangentAtSeed);
+		Cluster protoCluster = buildProtoCluster(seed, unusedHitsByLayer, tangentAtSeed, dotProductCutForNonTracks);
 		output.add(protoCluster);
 		if (m_photonSplitDebug) { debugPrintProtoCluster(protoCluster, photon); }
 	    }
@@ -1474,7 +1743,7 @@
 	}
     }
 
-    Cluster buildProtoCluster(CalorimeterHit seed, Map<Integer, Set<CalorimeterHit>> unusedHitsByLayer, Hep3Vector tangentAtSeed) {
+    Cluster buildProtoCluster(CalorimeterHit seed, Map<Integer, Set<CalorimeterHit>> unusedHitsByLayer, Hep3Vector tangentAtSeed, double dotProductCut) {
 	int iLayer = getLayer(seed);
 	Set<CalorimeterHit> protoCluster = new HashSet<CalorimeterHit>();
 	protoCluster.add(seed);
@@ -1493,7 +1762,7 @@
 	unusedHitsByLayer.get(iLayer).removeAll(protoCluster);
 	int currentProtoClusterExpansionLayer = iLayer;
 	while (true) {
-	    Set<CalorimeterHit> extraProtoClusterHits = expandProtoCluster(protoCluster, currentProtoClusterExpansionLayer, unusedHitsByLayer, tangentAtSeed);
+	    Set<CalorimeterHit> extraProtoClusterHits = expandProtoCluster(protoCluster, currentProtoClusterExpansionLayer, unusedHitsByLayer, tangentAtSeed, dotProductCut);
 	    for (CalorimeterHit hit : extraProtoClusterHits) {
 		int hitLayer = getLayer(hit);
 		unusedHitsByLayer.get(hitLayer).remove(hit);
@@ -1512,7 +1781,7 @@
 	return newClus;
     }
 
-    Set<CalorimeterHit> expandProtoCluster(Set<CalorimeterHit> protoCluster, int currentProtoClusterExpansionLayer, Map<Integer, Set<CalorimeterHit>> unusedHitsByLayer, Hep3Vector tangentAtSeed) {
+    Set<CalorimeterHit> expandProtoCluster(Set<CalorimeterHit> protoCluster, int currentProtoClusterExpansionLayer, Map<Integer, Set<CalorimeterHit>> unusedHitsByLayer, Hep3Vector tangentAtSeed, double dotProductCut) {
 	Set<CalorimeterHit> output = new HashSet<CalorimeterHit>();
 	List<CalorimeterHit> protoClusterHitsInLayer = new Vector<CalorimeterHit>();
 	for (CalorimeterHit hit : protoCluster) {
@@ -1525,7 +1794,6 @@
 	    //   a) Within n layers (moving outward)
 	    //   b) Inside a cone of angle theta
 	    int n = 2;
-	    double dotProductCut = 0.7; // Corresponds to theta = 20 degrees
 	    Hep3Vector hitPosition = new BasicHep3Vector(hit.getPosition());
 	    for (int targetLayer = currentProtoClusterExpansionLayer+1; targetLayer <= currentProtoClusterExpansionLayer+n; targetLayer++) {
 		Set<CalorimeterHit> unusedHitsInTargetLayer = unusedHitsByLayer.get(targetLayer);
@@ -2934,10 +3202,13 @@
 	Set<Cluster> allUsedCores = new HashSet<Cluster>();
 	allUsedCores    .addAll(neutralClusterCoresUsed);
 	allUsedCoresList.addAll(neutralClusterCoresUsed);
+	testForHitUniqueness(allUsedCores);
 	if (allUsedCores.size() != allUsedCoresList.size()) { throw new AssertionError("Mis-counting of clusters: "+allUsedCores.size()+" vs "+allUsedCoresList.size()); }
 	allUsedCores    .addAll(photonsUsed);
 	allUsedCoresList.addAll(photonsUsed);
 	if (allUsedCores.size() != allUsedCoresList.size()) { throw new AssertionError("Mis-counting of clusters: "+allUsedCores.size()+" vs "+allUsedCoresList.size()); }
+	testForHitUniqueness(photonsUsed);
+	testForHitUniqueness(allUsedCores);
 	for (Track tr : tracksSortedByMomentum) {
 	    Set<Track> jet = mapTrackToJet.get(tr);
 	    boolean useJetInstead = m_clusterAsJets && jet != null;
@@ -2953,6 +3224,7 @@
 	    }
 	}
 	if (allUsedCores.size() != allUsedCoresList.size()) { throw new AssertionError("Mis-counting of clusters: "+allUsedCores.size()+" vs "+allUsedCoresList.size()); }
+	testForHitUniqueness(allUsedCores);
 	if (m_clusterAsJets) {
 	    for (Set<Track> jet : jets) {
 		for (Cluster clus : newMapJetToShowerComponents.get(jet)) {
@@ -3052,6 +3324,16 @@
 	}
     }
 
+    void testForHitUniqueness(Collection<Cluster> clusters) {
+	Set<CalorimeterHit> hits = new HashSet<CalorimeterHit>();
+	List<CalorimeterHit> hitsList = new Vector<CalorimeterHit>();
+	for (Cluster clus : clusters) {
+	    hits    .addAll(clus.getCalorimeterHits());
+	    hitsList.addAll(clus.getCalorimeterHits());
+	}
+	if (hits.size() != hitsList.size()) { throw new AssertionError("Mis-counting of hits: "+hits.size()+" vs "+hitsList.size()); }
+    }
+
     void clusteringIteration(List<Track> tracksSortedByMomentum, Map<Track, Cluster> tweakedTracksMatchedToClusters, List<Track> electronTracks, Map<Track, Set<Cluster>> newMapTrackToShowerComponents, Map<Cluster, Track> newMapShowerComponentToTrack, Map<Track, Set<Cluster>> newMapTrackToVetoedAdditions, Map<Track, Set<Cluster>> mapTrackToVetoedAdditionsDueToTrackSeed, List<SharedClusterGroup> allSharedClusters, Map<Track,Double> newMapTrackToThreshold, Map<Track,Double> newMapTrackToTolerance) {
 	// Pop the seeds in first:
 	for (Track tr : tracksSortedByMomentum) {
@@ -3589,6 +3871,56 @@
 	m_event.put("DebugMipsRejected", debugList, Cluster.class, flag);
     }
 
-
+    void handleTrackMatchingAmbiguities(Map<Cluster, List<Track>> clustersMatchedToTracks,
+					Map<Track,Cluster> tracksMatchedToClusters,
+					Set<Track> uniquelyMatchedTracks,
+					Set<Track> ambiguouslyMatchedTracks,
+					Map<Track, Cluster> tweakedTracksMatchedToClusters,
+					Map<Cluster, Track> clustersMatchedToTweakedTracks,
+					List<Track> tweakedTracks,
+					Map<Track, Track> mapOrigTrackToTweakedTrack
+					)
+    {
+	if (uniquelyMatchedTracks == null || uniquelyMatchedTracks.size()!=0) { throw new AssertionError("Output collection not in correct format"); }
+	if (ambiguouslyMatchedTracks == null || ambiguouslyMatchedTracks.size()!=0) { throw new AssertionError("Output collection not in correct format"); }
+	if (tweakedTracksMatchedToClusters == null || tweakedTracksMatchedToClusters.size()!=0) { throw new AssertionError("Output collection not in correct format"); }
+	if (clustersMatchedToTweakedTracks == null || clustersMatchedToTweakedTracks.size()!=0) { throw new AssertionError("Output collection not in correct format"); }
+	if (tweakedTracks == null || tweakedTracks.size()!=0) { throw new AssertionError("Output collection not in correct format"); }
+	if (mapOrigTrackToTweakedTrack == null || mapOrigTrackToTweakedTrack.size()!=0)  { throw new AssertionError("Output collection not in correct format"); }
+	List<Set<Track>> ambiguouslyMatchedTrackGroups = new Vector<Set<Track>>();
+	for (Cluster clus : clustersMatchedToTracks.keySet()) {
+	    List<Track> tracksMatchedToThisCluster = clustersMatchedToTracks.get(clus);
+	    if (tracksMatchedToThisCluster.size() > 1) {
+		ambiguouslyMatchedTracks.addAll(tracksMatchedToThisCluster);
+		Set<Track> newSet = new HashSet<Track>();
+		newSet.addAll(tracksMatchedToThisCluster);
+		ambiguouslyMatchedTrackGroups.add(newSet);
+	    } else {
+		uniquelyMatchedTracks.addAll(tracksMatchedToThisCluster);
+	    }
+	}
+	if (uniquelyMatchedTracks.size() + ambiguouslyMatchedTracks.size() != tracksMatchedToClusters.keySet().size()) { throw new AssertionError("Book-keeping error: "+uniquelyMatchedTracks.size()+" + "+ambiguouslyMatchedTracks.size()+" != "+tracksMatchedToClusters.keySet().size()); }	
+	// Redo ambiguous tracks hackishly:
+	// Similarly, redo track -> cluster map to use tweaked tracks
+	tweakedTracks.addAll(uniquelyMatchedTracks);
+	for (Track tr : uniquelyMatchedTracks) {
+	    mapOrigTrackToTweakedTrack.put(tr,tr);
+	}
+	for (Set<Track> trackSet : ambiguouslyMatchedTrackGroups) {
+	    MultipleTrackTrack tweakedTrack = new MultipleTrackTrack(trackSet);
+	    tweakedTracks.add(tweakedTrack);
+	    for (Track origTrack : trackSet) {
+		Cluster clus = tracksMatchedToClusters.get(origTrack);
+		tweakedTracksMatchedToClusters.put(tweakedTrack, clus); // over-writes sometimes, but that's OK.
+		clustersMatchedToTweakedTracks.put(clus, tweakedTrack);
+		mapOrigTrackToTweakedTrack.put(origTrack, tweakedTrack);
+	    }
+	}
+	for (Track tr : uniquelyMatchedTracks) {
+	    Cluster clus = tracksMatchedToClusters.get(tr);
+	    tweakedTracksMatchedToClusters.put(tr, clus);
+	    clustersMatchedToTweakedTracks.put(clus, tr);
+	}
+    }
 }
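For orientation: the seed-splitting added above hinges on testEoverP_oneSided, whose body is not part of this diff. A one-sided test of this kind plausibly flags a seed whose energy exceeds the matched track's momentum by more than `tolerance` standard deviations; a minimal sketch under that assumption (the resolution term `a` below is illustrative, not a project value):

    // Sketch only -- not the project's testEoverP_oneSided. Assumes a purely
    // stochastic single-hadron resolution sigma_E = a * sqrt(p).
    static boolean exampleTestEoverP_oneSided(double clusterEnergy, double trackMomentum, double tolerance) {
        final double a = 0.6; // GeV^(1/2), assumed resolution constant
        double sigmaE = a * Math.sqrt(trackMomentum);
        // One-sided: only E well above p fails; the E/p < 1 case is handled
        // separately by the cone-based fixes (m_fixSingleTracksWithCone).
        return (clusterEnergy - trackMomentum) < tolerance * sigmaE;
    }

The flagged seed is then split with splitPhoton(seed, individualTracks, splitOutputMap, 10.0, false), and each track is re-tested against its piece, as the hunk above shows.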
 
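The dotProductCut values threaded through buildProtoCluster/expandProtoCluster above (0.7 now, with 0.85 floated in a FIXME) cut on the cosine of the angle between the hit direction and the seed tangent. Note that the comment removed in this commit equated 0.7 with 20 degrees, but cos(20 deg) is about 0.94; the opening angles actually implied are:

    // Sketch: opening angles implied by the dot-product thresholds above.
    double thetaFor070 = Math.toDegrees(Math.acos(0.70)); // ~45.6 degrees
    double thetaFor085 = Math.toDegrees(Math.acos(0.85)); // ~31.8 degrees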

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.28 -> 1.29
diff -u -r1.28 -r1.29
--- ReclusterDriver.java	20 Jul 2008 22:50:03 -0000	1.28
+++ ReclusterDriver.java	4 Aug 2008 17:38:17 -0000	1.29
@@ -25,6 +25,7 @@
 import org.lcsim.recon.pfa.structural.FuzzyQPhotonClusterEnergyCalculator;
 import org.lcsim.recon.pfa.structural.FuzzyPhotonClusterEnergyCalculator;
 import org.lcsim.recon.pfa.structural.FuzzyNeutralHadronClusterEnergyCalculator;
+import org.lcsim.util.aida.AIDA;
 
 /**
   * An algorithm to recluster showers using E/p information
@@ -37,7 +38,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.28 2008/07/20 22:50:03 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.29 2008/08/04 17:38:17 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -53,7 +54,8 @@
 
     boolean m_useOldCalibration = false;
     boolean m_useAnalogHcalCalibration = false;
-    boolean m_useSteveMipsForChargedCalibration = true;
+    boolean m_useSteveMipsForChargedCalibration = false;
+    boolean m_useSteveMipsForConeScoring = false;
 
     boolean m_megaDebug = false;
     boolean m_debug = false;
@@ -82,6 +84,8 @@
     protected int m_punchThroughHitMinimum = 4;
     protected boolean m_checkSharedHitsForPunchThrough = true;
 
+    protected boolean m_debugLinkPlots = false;
+
     protected ReclusterDriver() {
 	// Gah, debug only!
     }
@@ -146,9 +150,11 @@
 	} else {
 	    org.lcsim.recon.pfa.structural.FuzzyQNeutralHadronClusterEnergyCalculator newNeutralCalib = new org.lcsim.recon.pfa.structural.FuzzyQNeutralHadronClusterEnergyCalculator(m_useAnalogHcalCalibration);
 	    m_neutralCalib = newNeutralCalib;
-	    String mipListName = "mips";
+	    String mipListName;
 	    if (m_useSteveMipsForChargedCalibration) {
 		mipListName = "TMClusters";
+	    } else {
+		mipListName = "ShowerFinderMips";
 	    }
 	    org.lcsim.recon.pfa.structural.ChargedHadronClusterEnergyCalculator chargedCalibration = new org.lcsim.recon.pfa.structural.ChargedHadronClusterEnergyCalculator(mipListName, newNeutralCalib);
 	    m_chargedCalib = chargedCalibration;
@@ -957,6 +963,18 @@
 	return (minDistanceToSurface > 40.0); // 4cm
     }
 
+    protected void fillLinkPlot(Cluster clus1, Cluster clus2, double score, String name) {
+	MCParticle part1 = quoteDominantParticle(clus1);
+	MCParticle part2 = quoteDominantParticle(clus2);
+	if (part1 == part2) {
+	    String fullName = new String("links/"+name+"_pass");
+	    AIDA.defaultInstance().cloud1D(fullName).fill(score);
+	} else {
+	    String fullName = new String("links/"+name+"_fail");
+	    AIDA.defaultInstance().cloud1D(fullName).fill(score);
+	}
+    }
+
     protected void initPotentialLinks_PhotonMip(Collection<Cluster> photons, Collection<Cluster> mips, double scaleFactorTrackToClump) {
 	List<Cluster> photonsNotAtInnerSurface = new Vector<Cluster>();
 	for (Cluster photon : photons) {
@@ -1022,7 +1040,13 @@
     protected void initPotentialLinks_Cone(Collection<Cluster> seeds, Collection<Cluster> linkableClusters, Map<Cluster, Track> clustersMatchedToTweakedTracks, double scaleOK, double scaleMin) {
 	SteveMipWrapper tmpWrapper = new SteveMipWrapper();
 	tmpWrapper.process(m_event);
-	SteveMIPReassignmentAlgorithm mipAlg = new SteveMIPReassignmentAlgorithm(m_event, 1.0);
+	String mapName;
+	if (m_useSteveMipsForConeScoring) {
+	    mapName = "TrackMipMap";
+	} else {
+	    mapName = "ShowerFinderMapTrackToMip";
+	}
+	SteveMIPReassignmentAlgorithm mipAlg = new SteveMIPReassignmentAlgorithm(m_event, 1.0, mapName);
 	for (Cluster seed : seeds) {
 	    // Setup: Find what track the seed is connected to.
 	    Track tr = clustersMatchedToTweakedTracks.get(seed);
@@ -1092,6 +1116,7 @@
 			if (!linkAlreadyExists) {
 			    // No pre-existing link
 			    addPotentialLink(seed, clus, score);
+			    if (m_debugLinkPlots) { fillLinkPlot(clus, seed, score, "Cone"); }
 			} else {
 			    // Pre-existing link
 			    double oldScore = previousLink.score();
@@ -1114,6 +1139,7 @@
 				boolean linkExistsAfterAdd2 = checkForLink(clus, seed);
 				if (!linkExistsAfterAdd1) { throw new AssertionError("Book-keeping error!"); }
 				if (!linkExistsAfterAdd2) { throw new AssertionError("Book-keeping error!"); }
+				if (m_debugLinkPlots) { fillLinkPlot(clus, seed, score, "Cone"); }
 			    }
 			}
 		    }
@@ -1142,6 +1168,7 @@
 		    }
 		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
 		    addPotentialLink(mip1, mip2, score);
+		    if (m_debugLinkPlots) { fillLinkPlot(mip1, mip2, score, "MipMip"); }
 		    //if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip1.getCalorimeterHits().size()+"] -- MIP["+mip2.getCalorimeterHits().size()+"]  = "+score+"  (not in same large cluster)"); }
 		    if (m_debugLinkScores) { debugPrintLink(mip1, mip2, "MIP", "MIP", score); }
 		}
@@ -1168,6 +1195,7 @@
 		    }
 		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
 		    addPotentialLink(mip1, mip2, score);
+		    if (m_debugLinkPlots) { fillLinkPlot(mip1, mip2, score, "MipMip"); }
 		    if (m_debugLinkScores) { debugPrintLink(mip1, mip2, "MIP", "MIP", score); }
 		}
 	    }
@@ -1192,6 +1220,7 @@
 		    }
 		    if (score > 1.0) { score = 1.0; } else if (score < 0.0) { score = 0.0; }
 		    addPotentialLink(mip, clump, score);
+		    if (m_debugLinkPlots) { fillLinkPlot(mip, clump, score, "MipClump"); }
 		    //if (m_debugLinkScores) { System.out.println("DEBUG:   MIP["+mip.getCalorimeterHits().size()+"] -- Clump["+clump.getCalorimeterHits().size()+"]  = "+score); }
 		    if (m_debugLinkScores) { debugPrintLink(mip, clump, "MIP", "Clump", score); }
 		}
@@ -1207,6 +1236,7 @@
 		if (!linkAlreadyExists) {
 		    double score = scoreOnProximityAndPointing(mip, clus, thresholdForProximity);
 		    addPotentialLink(mip, clus, score);
+		    if (m_debugLinkPlots) { fillLinkPlot(mip, clus, score, "MipMisc"); }
 		    if (m_debugLinkScores) { debugPrintLink(mip, clus, "MIP", typeName, score); }
 		    if (m_debugLinkScores) { 
 			double likelihood = m_eval.getLinkLikelihoodTrackToClump(mip,clus);
@@ -1234,6 +1264,7 @@
 		if (!linkAlreadyExists) {
 		    double score = scoreOnProximityAndAngle(clus1, clus2, thresholdForProximity);
 		    addPotentialLink(clus1, clus2, score);
+		    if (m_debugLinkPlots) { fillLinkPlot(clus1, clus2, score, "MiscMisc"); }
 		    if (m_debugLinkScores) { debugPrintLink(clus1, clus2, type1, type2, score); }
 		}
 	    }
@@ -1258,6 +1289,7 @@
 		    	score *= penaltyFactor;
 		    }
 		    addPotentialLink(clus1, clus2, score);
+		    if (m_debugLinkPlots) { fillLinkPlot(clus1, clus2, score, "MiscSelf"); }
 		    if (m_debugLinkScores) { debugPrintLink(clus1, clus2, typeName, typeName, score); }
 		}
 	    }
@@ -1360,20 +1392,26 @@
 	    double realPurity = quotePurity_T(tmpTrackList, showerComponents, allSharedClusters);
 	    double coreEffic = quoteEfficiency_T(tmpTrackList, showerComponents);
 	    double corePurity = quotePurity_T(tmpTrackList, showerComponents);
+	    boolean trackPunchThrough = isPunchThrough(showerComponents, allSharedClusters);
 	    String printme = new String();
 	    if (fromJet) {
 		printme += "JET";
 	    } else {
 		printme += "   ";
-		double trackChiSquared = normalizedResidual * normalizedResidual;
-		totalChiSquared += trackChiSquared;
-		totalNDF++;
+		if (!trackPunchThrough) {
+		    double trackChiSquared = normalizedResidual * normalizedResidual;
+		    totalChiSquared += trackChiSquared;
+		    totalNDF++;
+		}
 	    }
 	    printme += "Track: threshold="+newMapTrackToThreshold.get(tr).floatValue()
 		+", tolerance="+newMapTrackToTolerance.get(tr).floatValue()
 		+", E/p is "+((float)(clusterEnergy))+" / "+((float)(trackMomentum))+" => NormResid = "+((float)(normalizedResidual))
 		+". Core: effic="+((float)(coreEffic))+", purity="+((float)(corePurity))
 		+". Real: effic="+((float)(realEffic))+", purity="+((float)(realPurity));
+	    if (trackPunchThrough) {
+		printme += " [punch-through]";
+	    }
 	    System.out.println(printme);
 	}
 	for (Set<Track> jet : jets) {
@@ -1386,13 +1424,19 @@
 	    double realPurity = quotePurity_T(jet, showerComponents, allSharedClusters);
 	    double coreEffic = quoteEfficiency_T(jet, showerComponents);
 	    double corePurity = quotePurity_T(jet, showerComponents);
-	    System.out.println("   Jet: "
+	    boolean jetPunchThrough = isPunchThrough(showerComponents, allSharedClusters);
+	    String printme = new String("   Jet: "
 			       +" E/p is "+((float)(clusterEnergy))+" / "+((float)(jetMomentum))+" => NormResid = "+((float)(normalizedResidual))
 			       +". Core: effic="+((float)(coreEffic))+", purity="+((float)(corePurity))
 			       +". Real: effic="+((float)(realEffic))+", purity="+((float)(realPurity)));
-	    double jetChiSquared = normalizedResidual * normalizedResidual;
-	    totalChiSquared += jetChiSquared;
-	    totalNDF++;
+	    if (jetPunchThrough) {
+		printme += " [punch-through]";
+	    } else {
+		double jetChiSquared = normalizedResidual * normalizedResidual;
+		totalChiSquared += jetChiSquared;
+		totalNDF++;
+	    }
+	    System.out.println(printme);
 	}
 
 	System.out.println("Total CHI^2 = "+totalChiSquared+" with NDF ~ "+tracksSortedByMomentum.size());

lcsim/src/org/lcsim/contrib/uiowa
SteveMIPReassignmentAlgorithm.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- SteveMIPReassignmentAlgorithm.java	8 Jul 2008 16:36:23 -0000	1.1
+++ SteveMIPReassignmentAlgorithm.java	4 Aug 2008 17:38:17 -0000	1.2
@@ -14,16 +14,18 @@
     protected LocalHelixExtrapolator m_extrap;
     protected EventHeader m_event;
     protected double m_limit;
+    protected String m_mapTrackToMipName;
     protected Map<Track, Hep3Vector> m_cachePoint;
     protected Map<Track, Hep3Vector> m_cacheTangent;
 
-    public SteveMIPReassignmentAlgorithm(EventHeader event, double limit) {
+    public SteveMIPReassignmentAlgorithm(EventHeader event, double limit, String mapTrackToMip) {
 	m_event = event;
 	m_extrap = new LocalHelixExtrapolator();
 	m_extrap.process(event); // pick up geometry info
 	m_limit = limit;
 	m_cachePoint = new HashMap<Track, Hep3Vector>();
 	m_cacheTangent = new HashMap<Track, Hep3Vector>();
+	m_mapTrackToMipName = mapTrackToMip;
     }
 
     public Double computeFigureOfMerit(Track tr, Cluster clus) {
@@ -110,10 +112,8 @@
 
     protected void findPointAndTangentNoCache(Track tr, BasicHep3Vector outputPoint, BasicHep3Vector outputTangentUnit) throws ExtrapolationFailureException {
 	// Find the MIP trace for the track
-	Object objTrackMipMap = m_event.get("TrackMipMap");
-	Object objMipClusILMap = m_event.get("MipClusILMap");
+	Object objTrackMipMap = m_event.get(m_mapTrackToMipName);
 	Map<Track, BasicCluster> trkmipmap = ( Map<Track, BasicCluster> ) (objTrackMipMap);
-	Map<BasicCluster, Integer> clusILmap = ( Map<BasicCluster, Integer> ) (objMipClusILMap);
 
 	BasicCluster mip = trkmipmap.get(tr);
 	if (mip == null) {
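Usage note: the new third constructor argument names the event collection holding the track-to-MIP map, replacing the hard-wired "TrackMipMap" lookup (and the now-unused "MipClusILMap" retrieval). Standalone examples matching the updated call sites in the two drivers above:

    SteveMIPReassignmentAlgorithm steveAlg =
        new SteveMIPReassignmentAlgorithm(event, 1.0, "TrackMipMap");
    SteveMIPReassignmentAlgorithm showerAlg =
        new SteveMIPReassignmentAlgorithm(event, 1.0, "ShowerFinderMapTrackToMip");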
CVSspam 0.2.8