Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDTreeDriver.java+654-61.53 -> 1.54
MJC: (contrib) Some test code to identify when a MIP overlaps a clump/photon

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.53 -> 1.54
diff -u -r1.53 -r1.54
--- ReclusterDTreeDriver.java	28 Sep 2008 06:22:57 -0000	1.53
+++ ReclusterDTreeDriver.java	29 Sep 2008 20:36:15 -0000	1.54
@@ -35,7 +35,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.53 2008/09/28 06:22:57 tjkim Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.54 2008/09/29 20:36:15 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -69,6 +69,7 @@
     protected boolean m_allowPhotonSeeds = true;
     protected boolean m_splitPhotonSeeds = true;
     protected boolean m_allPhotonsAreValidSeeds = true;
+    protected boolean m_checkForPhotonTrackOverlap = false;
 
     protected boolean m_allowNeutralCalibForEoverP = false;
 
@@ -238,6 +239,7 @@
 	if (m_useFcal) {
 	    allHits.addAll(allHitsFcalEndcap);
 	}
+	HitMap allHitsMap = new HitMap(allHits);
 
 	for (Cluster photon : photons) {
 	    if (photon.getCalorimeterHits().contains(null)) {
@@ -725,11 +727,10 @@
 	}
 
 	// Track seeds
-	Set<Cluster> seeds = clustersMatchedToTracks.keySet();
 	List<Cluster> seedLeftoverHitClusters = new Vector();
 	List<Cluster> nonSeedLeftoverHitClusters = new Vector();
 	for (Cluster clus : leftoverHitClusters) {
-	    if (seeds.contains(clus) ) {
+	    if (clustersMatchedToTracks.keySet().contains(clus) ) {
 		seedLeftoverHitClusters.add(clus);
 	    } else {
 		nonSeedLeftoverHitClusters.add(clus);
@@ -742,21 +743,21 @@
 	List<Cluster> nonSeedHadronLikePhotonClusters = new Vector();
 	List<Cluster> nonSeedPhotonLikePhotonClusters = new Vector();
 	for (Cluster clus : electronClusters) {
-	    if (seeds.contains(clus)) {
+	    if (clustersMatchedToTracks.keySet().contains(clus)) {
 		seedElectronClusters.add(clus);
 	    } else {
 		throw new AssertionError("Electron cluster is not a seed!");
 	    }
 	}
 	for (Cluster clus : photonLikePhotons) {
-	    if (seeds.contains(clus)) {
+	    if (clustersMatchedToTracks.keySet().contains(clus)) {
 		throw new AssertionError("Photon cluster is a seed!");
 	    } else {
 		nonSeedPhotonLikePhotonClusters.add(clus);
 	    }
 	}
 	for (Cluster clus : chargedHadronLikePhotons) {
-	    if (seeds.contains(clus)) {
+	    if (clustersMatchedToTracks.keySet().contains(clus)) {
 		seedHadronLikePhotonClusters.add(clus);
 	    } else {
 		nonSeedHadronLikePhotonClusters.add(clus);
@@ -764,12 +765,617 @@
 	}
 	if (seedElectronClusters.size() + seedHadronLikePhotonClusters.size() + nonSeedHadronLikePhotonClusters.size() + nonSeedPhotonLikePhotonClusters.size() != modifiedPhotonClusters.size() ) { throw new AssertionError("Book-keeping error"); }
 
+	// DEBUG
+	if (m_checkForPhotonTrackOverlap) {
+	    System.out.print("DEBUG: BEFORE: ");
+	    countHits(mipsOld, "mipsOld");
+	    countHits(mipsNew, "mipsNew");
+	    countHits(mips, "mips");
+	    countHits(clumps, "clumps");
+	    countHits(leftoverHitClusters, "leftoverHitClusters");
+	    countHits(modifiedPhotonClusters, "modifiedPhotonClusters");
+	    System.out.println("");
+	    printTracks(tweakedTracksMatchedToClusters);
+	    Set<Cluster> clustersFlaggedAsPhotonOverlaps = new HashSet<Cluster>();
+	    // Find which clumps/blocks are in the ECAL:
+	    Set<Cluster> clumpsAndBlocks = new HashSet<Cluster>();
+	    clumpsAndBlocks.addAll(clumps);
+	    clumpsAndBlocks.addAll(treesWithNoStructure);
+	    List<Cluster> clumpsAndBlocksEcal = new Vector<Cluster>();
+	    for (Cluster clus : clumpsAndBlocks) {
+		HitInECALDecision dec = new HitInECALDecision();
+		ListFilter filter = new ListFilter(dec);
+		List<CalorimeterHit> hitsEcal = filter.filterList(clus.getCalorimeterHits());
+		if (hitsEcal.size() == clus.getCalorimeterHits().size()) {
+		    // 100% of hits in ECAL
+		    clumpsAndBlocksEcal.add(clus);
+		} else if (hitsEcal.size() != 0) {
+		    // Error: Some hits in ECAL but not all
+		    throw new AssertionError("Book-keeping error: "+hitsEcal.size()+" / "+clus.getCalorimeterHits().size()+" hits of cluster in ECAL -- should be all or none");
+		}
+	    }
+	    // Check the ECAL clumps/blocks:
+	    for (Cluster clump : clumpsAndBlocksEcal) {
+		int size = clump.getCalorimeterHits().size();
+		Set<SimCalorimeterHit> chargedHits = new HashSet<SimCalorimeterHit>();
+		Set<SimCalorimeterHit> neutralHits = new HashSet<SimCalorimeterHit>();
+		Set<SimCalorimeterHit> photonHits = new HashSet<SimCalorimeterHit>();
+		Map<MCParticle, List<SimCalorimeterHit>> truth = truthFromCluster(clump);
+		for (MCParticle part : truth.keySet()) {
+		    if (Math.abs(part.getCharge()) > 0.5) {
+			// Charged
+			chargedHits.addAll(truth.get(part));
+		    } else if (part.getPDGID()==22) {
+			// Photon
+			photonHits.addAll(truth.get(part));
+		    } else {
+			// Other neutral
+			neutralHits.addAll(truth.get(part));
+		    }
+		}
+		Map<Integer, Set<CalorimeterHit>> mapHitsByLayer = new HashMap<Integer, Set<CalorimeterHit>>();
+		for (CalorimeterHit hit : clump.getCalorimeterHits()) {
+		    int layer = getLayer(hit);
+		    Set<CalorimeterHit> hits = mapHitsByLayer.get(layer);
+		    if (hits == null) { hits = new HashSet<CalorimeterHit>(); mapHitsByLayer.put(layer, hits); }
+		    hits.add(hit);
+		}
+		int firstLayer = 999;
+		int firstLayerWithThreeHits = 999;
+		int lastLayer = -1;
+		int lastLayerWithThreeHits = -1;
+		boolean foundLayerWithThreeHits = false;
+		for (Integer layer : mapHitsByLayer.keySet()) {
+		    Set<CalorimeterHit> hits = mapHitsByLayer.get(layer);
+		    if (hits != null && hits.size() > 0) {
+			if (layer < firstLayer) { firstLayer = layer; }
+			if (layer > lastLayer) { lastLayer = layer; }
+			if (layer < firstLayerWithThreeHits && hits.size()>=3) { firstLayerWithThreeHits = layer; foundLayerWithThreeHits = true; }
+			if (layer > lastLayerWithThreeHits && hits.size()>=3) { lastLayerWithThreeHits = layer; foundLayerWithThreeHits = true; }
+		    }
+		}
+		
+		boolean flaggedAsOverlap = (firstLayerWithThreeHits < 7 && firstLayer > 0 && size > 50);
+		if (flaggedAsOverlap) { 
+		    clustersFlaggedAsPhotonOverlaps.add(clump); 
+		}
+		
+		boolean correctToFlag = (photonHits.size() > 15 && chargedHits.size() > 5);
+		System.out.print("DEBUG: Flagged ");
+		if (clumps.contains(clump)) {
+		    System.out.print("clump");
+		} else {
+		    System.out.print("block");
+		}
+		System.out.print(" with "+size+" hits ");
+		if (correctToFlag) {
+		    if (flaggedAsOverlap) { 
+			System.out.print("correctly as overlap"); 
+		    } else {
+			System.out.print("incorrectly as non-overlap"); 
+		    }
+		} else {
+		    if (flaggedAsOverlap) { 
+			System.out.print("incorrectly as overlap"); 
+		    } else {
+			System.out.print("correctly as non-overlap"); 
+		    }
+		}
+		System.out.print(" -- cluster has "+chargedHits.size()+" charged hits + "+photonHits.size()+" photon hits + "+neutralHits.size()+" NH hits. ");
+		System.out.print("Layers: "+firstLayer+"-"+lastLayer);
+		if (foundLayerWithThreeHits) {
+		    System.out.println(" (or "+firstLayerWithThreeHits+"-"+lastLayerWithThreeHits+" with 3+ hits)");
+		} else {
+		    System.out.println(" (no layer with 3+ hits)");
+		}
+	    }
+	    
+	    for (Cluster clus : clustersFlaggedAsPhotonOverlaps) {
+		System.out.print("DEBUG: Studying flagged ");
+		if (clumps.contains(clus)) {
+		    System.out.print("clump");
+		} else {
+		    System.out.print("block");
+		}
+		System.out.println(" with "+clus.getCalorimeterHits().size()+" hits...");
+		Map<MCParticle, List<SimCalorimeterHit>> truth = truthFromCluster(clus);
+		for (MCParticle part : truth.keySet()) {
+		    System.out.println("DEBUG:  * Truth "+part.getPDGID()+" with p="+part.getMomentum().magnitude()+" contributed "+truth.get(part).size()+" hits");
+		}
+		// Find layer range spanned
+		int firstLayer = 999;
+		int lastLayer = -1;
+		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		    int layer = getLayer(hit);
+		    if (layer < firstLayer) { firstLayer = layer; }
+		    if (layer > lastLayer) { lastLayer = layer; }
+		}
+		if (firstLayer == 999 || lastLayer < 0) { throw new AssertionError("Book-keeping error"); }
+		// Find hits
+		Set<Long> clusterHitIDs = new HashSet<Long>();
+		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		    Long id = new Long(hit.getCellID());
+		    clusterHitIDs.add(id);
+		}
+		// Look for tracks which
+		//   1) Extrapolate into the cluster
+		//   2) Exit the cluster
+		//   3) Have MIP-like hits after they exit the cluster
+		List<Track> overlaidMipTracks = new Vector<Track>();
+		Map<Track, List<CalorimeterHit>> matchedMipSegments = new HashMap<Track, List<CalorimeterHit>>();
+		for (Track tr : trackList) {
+		    Set<Long> trackCellsFoundInCluster = new HashSet<Long>();
+		    HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
+		    Hep3Vector interceptPoint = null;
+		    if (result != null) { interceptPoint = result.getInterceptPoint(); }
+		    if (interceptPoint != null) {
+			// Scan within cluster
+			for (int iLayer=firstLayer; iLayer<=lastLayer; iLayer++) {
+			    Long cellID = result.extendToECALLayerAndFindCell(iLayer);
+			    if (cellID != null && clusterHitIDs.contains(cellID)) {
+				trackCellsFoundInCluster.add(cellID);
+			    }
+			}
+		    }
+		    System.out.println("DEBUG: Tested track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" and connected with "+trackCellsFoundInCluster.size()+" cells in flagged clump of "+clus.getCalorimeterHits().size()+" hits.");
+		    Cluster currentSeed = tracksMatchedToClusters.get(tr);
+		    if (currentSeed != null) {
+			System.out.print("DEBUG: Seed has "+currentSeed.getCalorimeterHits().size()+" and is a");
+			if (mips.contains(currentSeed)) { System.out.print(" MIP"); }
+			if (clumps.contains(currentSeed)) { System.out.print(" clump"); }
+			if (treesWithNoStructure.contains(currentSeed)) { System.out.print(" block"); }
+			if (seedLeftoverHitClusters.contains(currentSeed)) { System.out.print(" small seed"); }
+			if (modifiedPhotonClusters.contains(currentSeed)) { System.out.print(" photon"); }
+			System.out.println(".");
+		    }
+
+
+		    if (trackCellsFoundInCluster.size()>0) {
+			// Try following track further...
+			List<Long> trajectory = new Vector<Long>(); // Ordered list of cell extrapolations
+			int numLayersEcal = 31; // FIXME: Don't hard-code
+			int numLayersHcal = 40; // FIXME: Don't hard-code
+			for (int iLayer=0; iLayer<numLayersEcal; iLayer++) {
+			    Long cellID = result.extendToECALLayerAndFindCell(iLayer);
+			    if (cellID != null) {
+				trajectory.add(new Long(cellID));
+			    }
+			}
+			// FIXME: Consider endcap/barrel overlap here.
+			for (int iLayer=0; iLayer<numLayersHcal; iLayer++) {
+			    Long cellID = result.extendToHCALLayerAndFindCell(iLayer);
+			    if (cellID != null) {
+				trajectory.add(new Long(cellID));
+			    }
+			}
+			
+			// Step through layers.
+			List<Boolean> foundHit = new Vector<Boolean>();
+			List<Boolean> foundMipHit = new Vector<Boolean>();
+			int maxNeighboursForMip_011 = 1;
+
+			boolean enteredClump = false;
+			boolean exitedClump = false;
+			boolean currentlyInClump = false;
+			int lastPseudoLayerHit = -1;
+			int lastClumpPseudoLayer = -1;
+			int countPseudoLayersHitAfterLastExit = 0;
+			int lastExitPseudoLayer = -1;
+
+			for (int iLayer=0; iLayer<trajectory.size(); iLayer++) {
+			    Long cellID = trajectory.get(iLayer);
+			    CalorimeterHit hit = allHitsMap.get(cellID);
+			    if (hit == null) {
+				// Didn't find a hit (and therefore no MIP hit)
+				foundHit.add(new Boolean(false));
+				foundMipHit.add(new Boolean(false));
+			    } else {
+				// Found a hit
+				foundHit.add(new Boolean(true));
+				lastPseudoLayerHit = iLayer;
+				org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
+				id.setID(cellID);
+				long[] neighbourIDs_011 = id.getNeighbourIDs(0, 1, 1);
+				long[] neighbourIDs_022 = id.getNeighbourIDs(0, 2, 2);
+				Set<CalorimeterHit> neighboutHits_011 = new HashSet<CalorimeterHit>();
+				Set<CalorimeterHit> neighboutHits_022 = new HashSet<CalorimeterHit>();
+				for (long neighbourID : neighbourIDs_011) {
+				    CalorimeterHit neighbourHit = allHitsMap.get(neighbourID);
+				    if (neighbourHit != null) {
+					neighboutHits_011.add(neighbourHit);
+				    }
+				}
+				for (long neighbourID : neighbourIDs_022) {
+				    CalorimeterHit neighbourHit = allHitsMap.get(neighbourID);
+				    if (neighbourHit != null) {
+					neighboutHits_022.add(neighbourHit);
+				    }
+				}
+				boolean mipHit = (neighboutHits_011.size() <= maxNeighboursForMip_011);
+				foundMipHit.add(new Boolean(mipHit));
+				System.out.print("DEBUG:   * Track extrapolated to hit in layer "+getLayer(hit)+" of "+hit.getSubdetector().getName());
+				System.out.print(". Neighbour hits in 3x3="+neighboutHits_011.size()+", in 5x5="+neighboutHits_022.size());
+				if (mipHit) {
+				    System.out.print(" => MIP.");
+				} else {
+				    System.out.print(" => non-MIP.");
+				}
+				if (clus.getCalorimeterHits().contains(hit)) {
+				    System.out.print(" -- in clump!");
+				    enteredClump = true;
+				    currentlyInClump = true;
+				    lastClumpPseudoLayer = iLayer;
+				} else {
+				    // Not in clump
+				    if (enteredClump) {
+					exitedClump = true;
+				    }
+				    if (currentlyInClump) {
+					// We just exited.
+					lastExitPseudoLayer = iLayer;
+					currentlyInClump = false;
+					countPseudoLayersHitAfterLastExit = 1;
+				    } else {
+					countPseudoLayersHitAfterLastExit++;
+				    }
+				}
+				System.out.println(" ");
+			    }
+			}
+
+			// Sanity checks
+			if (foundMipHit.size() != trajectory.size()) { throw new AssertionError("Book-keeping error"); }
+			if (foundHit.size() != trajectory.size()) { throw new AssertionError("Book-keeping error"); }
+			
+			// OK. Now, check if this is consistent with a charged track punching
+			// through a photon/neutral hadron.
+			//
+			// First, require at least m hits in the first n pseudolayers after
+			// exiting the clump to make sure that we really are extrapolating
+			// the right track.
+			int checkLayersAfterExitForContinuity = 7;
+			int minHitsInLayersAfterExitForContinuity = 5;
+			// Then, require a sequence of MIP hits to make sure that we really
+			// are following a MIP and not just part of the charged shower.
+			// We are allowed to skip a couple of layers immediately after exit
+			// since these may have remnants of the photon/hadron cluster.
+			int numLayersAllowedToSkipAfterExitForMipCheck = 2;
+			int numLayersToTestForMipCheck = 6;
+			int minMipHitsForMipCheck = 5;
+
+			// Check for continuity
+			boolean passesContinuityCheck = false;
+			if (lastExitPseudoLayer >= 0) {
+			    int hitsFoundForContinuityCheck = 0;
+			    for (int iLayer=0; iLayer<checkLayersAfterExitForContinuity; iLayer++) {
+				int pseudoLayer = iLayer + lastClumpPseudoLayer + 1;
+				if (pseudoLayer<foundHit.size() && foundHit.get(pseudoLayer)) {
+				    hitsFoundForContinuityCheck++;
+				}
+			    }
+			    if (hitsFoundForContinuityCheck >= minHitsInLayersAfterExitForContinuity) {
+				passesContinuityCheck = true;
+			    }
+			    System.out.println("DEBUG: Found "+hitsFoundForContinuityCheck+"/"+checkLayersAfterExitForContinuity+" hits post-exit => passesContinuityCheck="+passesContinuityCheck);
+			}
+
+			// Check for MIPness after exit
+			boolean passesMipCheck = false;
+			if (lastExitPseudoLayer >= 0) {
+			    // Look for at least [minMipHitsForMipCheck] MIP hits in a set of [numLayersToTestForMipCheck] layers.
+			    // We are allowed to skip up to [numLayersAllowedToSkipAfterExitForMipCheck] layers.
+			    for (int iSkip=0; iSkip<=numLayersAllowedToSkipAfterExitForMipCheck; iSkip++) {
+				int mipHitsFound = 0;
+				for (int iLayer=0; iLayer<numLayersToTestForMipCheck; iLayer++) {
+				    int pseudoLayer = iLayer + lastClumpPseudoLayer + iSkip + 1;
+				    if (pseudoLayer<foundMipHit.size() && foundMipHit.get(pseudoLayer)) {
+					mipHitsFound++;
+				    }
+				}
+				if (mipHitsFound >= minMipHitsForMipCheck) {
+				    passesMipCheck = true;
+				}
+				if (mipHitsFound > numLayersToTestForMipCheck) { throw new AssertionError("Book-keeping error"); }
+				System.out.println("DEBUG: For iSkip="+iSkip+", found "+mipHitsFound+"/"+numLayersToTestForMipCheck+" MIP hits post-exit => passesMipCheck="+passesMipCheck);
+			    }
+			}
+
+			if (passesContinuityCheck && passesMipCheck) {
+			    // Passes the cuts!
+			    overlaidMipTracks.add(tr);
+			    List<CalorimeterHit> matchedHits = new Vector<CalorimeterHit>();
+			    for (int iLayer=0; iLayer<trajectory.size(); iLayer++) {
+				if (foundHit.get(iLayer)) {
+				    Long cellID = trajectory.get(iLayer);
+				    CalorimeterHit hit = allHitsMap.get(cellID);
+				    if (hit == null) { throw new AssertionError("Book-keeping error"); }
+				    if (clus.getCalorimeterHits().contains(hit)) {
+					matchedHits.add(hit);
+				    }
+				}
+			    }
+			    matchedMipSegments.put(tr, matchedHits);
+			}
+
+			if (!enteredClump) {
+			    System.out.println("DEBUG:    -> ERROR: Never entered clump.");
+			} else {
+			    if (!exitedClump) {
+				System.out.println("DEBUG:    -> Never exited clump.");
+			    } else {
+				System.out.print("DEBUG:    -> Last clump layer = "+lastClumpPseudoLayer);
+				System.out.print(", exit layer = "+lastExitPseudoLayer);
+				int layersSkippedAfterExit = lastExitPseudoLayer - lastClumpPseudoLayer;
+				if (layersSkippedAfterExit < 0 && !currentlyInClump) {
+				    // This isn't right.
+				    //
+				    // Note that if:
+				    //   a) We exited the clump (e.g. in layer 15)    <-- lastExitPseudoLayer
+				    //   b) We later re-entered the clump (e.g. in layer 20)
+				    //   c) We never left the clump (e.g. ending in layer 23)  <-- lastClumpPseudoLayer
+				    // then we may have a situation where lastExitPseudoLayer < lastClumpPseudoLayer
+				    // (e.g. 15 < 23). That's why we have the "!currentlyInClump" check.
+				    throw new AssertionError("MISCOUNT -- layersSkippedAfterExit = "+lastExitPseudoLayer+" - "+lastClumpPseudoLayer+" = "+layersSkippedAfterExit);
+				}
+				if (layersSkippedAfterExit > 1) {
+				    System.out.print(" -- skipped "+(layersSkippedAfterExit-1)+" layers after exit");
+				}
+				System.out.println();
+			    }
+			}
+		    }
+		}
+		if (overlaidMipTracks.size()>0) {
+		    // There were some MIP tracks overlaid. Deal with this.
+		    Track initiallyMatchedTrack = clustersMatchedToTweakedTracks.get(clus);
+		    if (initiallyMatchedTrack == null) { System.out.println("DEBUG: No initially matched track."); }
+		    List<Track> initiallyMatchedTrackList = new Vector<Track>();
+		    if (initiallyMatchedTrack != null) {
+			if (initiallyMatchedTrack instanceof MultipleTrackTrack) {
+			    for (Track tr : initiallyMatchedTrack.getTracks()) { if (tr == null) { throw new AssertionError("Null track!"); } }
+			    initiallyMatchedTrackList.addAll(initiallyMatchedTrack.getTracks());
+			} else {
+			    initiallyMatchedTrackList.add(initiallyMatchedTrack);
+			}
+		    }
+		    System.out.println("DEBUG: "+initiallyMatchedTrackList.size()+" initially matched track(s): ");
+		    for (Track tr : initiallyMatchedTrackList) {
+			System.out.println("DEBUG:    * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
+		    }
+		    List<Track> overlaidMipTracksInitiallyMatched = new Vector<Track>();
+		    List<Track> overlaidMipTracksNotInitiallyMatched = new Vector<Track>();
+		    for (Track tr : overlaidMipTracks) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			if (initiallyMatchedTrackList.contains(tr)) {
+			    overlaidMipTracksInitiallyMatched.add(tr);
+			} else {
+			    overlaidMipTracksNotInitiallyMatched.add(tr);
+			}
+		    }
+		    if (overlaidMipTracksInitiallyMatched.size() + overlaidMipTracksNotInitiallyMatched.size() != overlaidMipTracks.size()) { throw new AssertionError("Book-keeping error"); }
+		    // If all tracks turned out to be MIPs, the cluster will wind up completely
+		    // track free -- we'll reflag it as a photon in that case.
+		    List<Track> remainingTracks = new Vector<Track>();
+		    for (Track tr : initiallyMatchedTrackList) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			if ( ! overlaidMipTracks.contains(tr) ) {
+			    remainingTracks.add(tr);
+			}
+		    }
+		    if (remainingTracks.size() != initiallyMatchedTrackList.size() - overlaidMipTracksInitiallyMatched.size()) { throw new AssertionError("Book-keeping error"); }
+		    boolean reflagClumpAsPhoton = (remainingTracks.size()==0);
+		    // For each track, excise the MIP hits and reflag them as a MIP segment.
+		    // Mark that as the new track seed.
+		    Set<CalorimeterHit> hitsToFlagAsMip = new HashSet<CalorimeterHit>();
+		    Map<Track, Cluster> newMipClusters = new HashMap<Track,Cluster>();
+		    for (Track tr : overlaidMipTracks) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			List<CalorimeterHit> hits = matchedMipSegments.get(tr);
+			BasicCluster newCluster = new BasicCluster();
+			for (CalorimeterHit hit : hits) {
+			    if (hit == null) { throw new AssertionError("Null hit!"); }
+			    if (!hitsToFlagAsMip.contains(hit)) {
+				// OK -- this hit was identified as belonging to this track
+				// and wasn't previously assigned
+				newCluster.addHit(hit);
+				hitsToFlagAsMip.add(hit);
+				System.out.println("DEBUG: Track that passed tests passed through a hit in layer "+getLayer(hit)+" -- added it to output cluster");
+			    }
+			}
+			if (newCluster.getCalorimeterHits().size()==0) {
+			    throw new AssertionError("Handle this case");
+			}
+			newMipClusters.put(tr, newCluster);
+		    }
+		    // Now make the remnants into a new cluster
+		    BasicCluster remainder = new BasicCluster();
+		    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+			if (hit == null) { throw new AssertionError("Null hit!"); }
+			if ( ! hitsToFlagAsMip.contains(hit) ) {
+			    remainder.addHit(hit);
+			}
+		    }
+		    if (remainingTracks.size()>0 && remainder.getCalorimeterHits().size()==0) { throw new AssertionError("Handle this case!"); }
+		    // Replace all obsolete track <--> cluster associations.
+		    // Start by recording how things used to look:
+		    Map<Track,Cluster> oldMatches = new HashMap<Track,Cluster>();
+		    for (Track tr : overlaidMipTracks) {
+			// 1) MIP tracks (possibly pointing to another seed)
+			Cluster oldMatch = tracksMatchedToClusters.get(tr);
+			if (oldMatch == null) {
+			    System.out.println("WARNING: Overlaid MIP track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" has null cluster match");
+			}
+			oldMatches.put(tr, oldMatch);
+			if (oldMatch != null) {
+			    // 2) Other tracks (possibly non-MIP) pointing to the same seed as a MIP track
+			    List<Track> matchedTracks = clustersMatchedToTracks.get(oldMatch);
+			    for (Track otherTr : matchedTracks) {
+				if (otherTr == null) { throw new AssertionError("Null track!"); }
+				oldMatches.put(otherTr, oldMatch);
+			    }
+			}
+		    }
+		    for (Track tr : remainingTracks) {
+			// 3) Any other track pointing to the initial cluster
+			Cluster oldMatch = tracksMatchedToClusters.get(tr);
+			oldMatches.put(tr, oldMatch);
+		    }
+		    // How will they look after:
+		    Map<Track,Cluster> newMatches = new HashMap<Track,Cluster>();
+		    for (Track tr : oldMatches.keySet()) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			if (overlaidMipTracks.contains(tr)) {
+			    Cluster mipClus = newMipClusters.get(tr);
+			    if (mipClus == null) { throw new AssertionError("Null MIP cluster piece!"); }
+			    newMatches.put(tr, mipClus);
+			} else if (remainingTracks.contains(tr)) {
+			    // This track pointed to the cluster we're breaking
+			    // up but wasn't flagged as a MIP.
+			    if (remainder.getCalorimeterHits().size()==0) { throw new AssertionError("Empty cluster!"); }
+			    newMatches.put(tr, remainder);
+			} else {			    
+			    // This track pointed to another cluster (that we're not
+			    // breaking up). One of the MIP tracks also happened to
+			    // point to that cluster.
+			    Cluster oldClus = oldMatches.get(tr);
+			    if (oldClus == null) { throw new AssertionError("Null old cluster!"); }
+			    newMatches.put(tr, oldClus);
+			}
+		    }
+		    // Check for ambiguity:
+		    Map<Cluster, List<Track>> newMatchesReverseMap = new HashMap<Cluster, List<Track>>();
+		    for (Track tr : newMatches.keySet()) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			Cluster matchedClus = newMatches.get(tr);
+			if (matchedClus == null) { throw new AssertionError("Null matched cluster!!"); }
+			List<Track> reverseMatches = newMatchesReverseMap.get(matchedClus);
+			if (reverseMatches == null) {
+			    reverseMatches = new Vector<Track>();
+			    newMatchesReverseMap.put(matchedClus, reverseMatches);
+			}
+			reverseMatches.add(tr);
+		    }
+		    // Purge the old matching info
+		    System.out.println("DEBUG: Will purge old matches for "+oldMatches.keySet().size()+" tracks:");
+		    for (Track tr : oldMatches.keySet()) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			Cluster seed = oldMatches.get(tr);
+			if (seed == null) { 
+			    System.out.println("DEBUG: Null seed for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
+			} else {
+			    clustersMatchedToTracks.remove(seed); // OK
+			    clustersMatchedToTweakedTracks.remove(seed); // OK (2/2)
+			}
+			tracksMatchedToClusters.remove(tr); // OK
+			uniquelyMatchedTracks.remove(tr); // OK
+			ambiguouslyMatchedTracks.remove(tr); // OK
+			Track tweakedTrack = mapOrigTrackToTweakedTrack.get(tr);
+			if (tweakedTrack == null) { throw new AssertionError("Null tweaked track!"); }
+			mapOrigTrackToTweakedTrack.remove(tr); // OK (2/2)
+			tweakedTracks.remove(tweakedTrack); // OK (2/2)
+			tweakedTracksMatchedToClusters.remove(tweakedTrack); // OK (2/2)
+			System.out.print("DEBUG: Purging track with p="+(new BasicHep3Vector(tr.getMomentum()).magnitude()));
+			if (tweakedTrack instanceof MultipleTrackTrack) {
+			    System.out.print(" from MultipleTrackTrack with "+tweakedTrack.getTracks().size()+" daughters and p="+(new BasicHep3Vector(tweakedTrack.getMomentum()).magnitude()));
+			}
+			if (seed != null) {
+			    System.out.println(" that used to point to a seed with "+seed.getCalorimeterHits().size());
+			} else {
+			    System.out.println(" that used to point to a null seed");
+			}
+		    }
+		    // Add the new matching info:
+		    for (Track tr : newMatches.keySet()) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			Cluster seed = newMatches.get(tr);
+			if (seed == null) { throw new AssertionError("Null seed!"); }
+			tracksMatchedToClusters.put(tr, seed);
+		    }
+		    for (Cluster seed : newMatchesReverseMap.keySet()) {
+			if (seed == null) { throw new AssertionError("Null seed!"); }
+			List<Track> tracks = newMatchesReverseMap.get(seed);
+			if (tracks == null) { throw new AssertionError("Null track list!"); }
+			clustersMatchedToTracks.put(seed, tracks);
+			if (tracks.size()==0) {
+			    throw new AssertionError("Book-keeping error!");
+			} else if (tracks.size()==1) {
+			    // Unique match
+			    Track tr = tracks.get(0);
+			    if (tr == null) { throw new AssertionError("Null track!"); }
+			    uniquelyMatchedTracks.add(tr);
+			    tweakedTracks.add(tr);
+			    mapOrigTrackToTweakedTrack.put(tr,tr);
+			    clustersMatchedToTweakedTracks.put(seed, tr);
+			    tweakedTracksMatchedToClusters.put(tr, seed);
+			    System.out.println("DEBUG: Unique track match: Tweaked track with p="+(new BasicHep3Vector(tr.getMomentum()).magnitude())+" -> seed cluster with "+seed.getCalorimeterHits().size()+" hits.");
+			} else {
+			    // Ambiguous match still
+			    for (Track tr : tracks) { if (tr == null) { throw new AssertionError("Null track!"); } }
+			    MultipleTrackTrack tweakedTrack = new MultipleTrackTrack(tracks);
+			    ambiguouslyMatchedTracks.add(tweakedTrack);
+			    tweakedTracks.add(tweakedTrack);
+			    for (Track tr : tracks) {
+				mapOrigTrackToTweakedTrack.put(tr, tweakedTrack);
+			    }
+			    clustersMatchedToTweakedTracks.put(seed, tweakedTrack);
+			    tweakedTracksMatchedToClusters.put(tweakedTrack, seed);
+			    System.out.println("DEBUG: Ambiguous track match: Tweaked track ("+tracks.size()+" sub-tracks) with p="+(new BasicHep3Vector(tweakedTrack.getMomentum()).magnitude())+" -> seed cluster with "+seed.getCalorimeterHits().size()+" hits.");
+			}			    
+		    }
+		    // If appropriate, make the remaining cluster pieces into a photon.
+		    if (remainder.getCalorimeterHits().size() > 0) {
+			if (reflagClumpAsPhoton) {
+			    System.out.println("DEBUG: Flagged remainder of cluster ("+remainder.getCalorimeterHits().size()+" hits) as photon");
+			    nonSeedHadronLikePhotonClusters.add(remainder);
+			    modifiedPhotonClusters.add(remainder);
+			} else {
+			    System.out.println("DEBUG: Flagged remainder of cluster ("+remainder.getCalorimeterHits().size()+" hits) as clump");
+			    clumps.add(remainder);
+			}
+		    }
+		    // Purge the old cluster that we split up from any listings:
+		    mipsOld.remove(clus);
+		    mipsNew.remove(clus);
+		    mips.remove(clus);
+		    clumps.remove(clus);
+		    leftoverHitClusters.remove(clus);
+		    treesWithNoStructure.remove(clus);
+		    modifiedPhotonClusters.remove(clus);
+		    // Enter the new split-up clusters:
+		    for (Track tr : overlaidMipTracks) {
+			Cluster newSeed = newMipClusters.get(tr);
+			if (newSeed == null) { throw new AssertionError("Null seed!"); }
+			if (newSeed.getCalorimeterHits().size()>3) {
+			    mipsNew.add(newSeed);
+			    mips.add(newSeed);
+			    System.out.println("DEBUG: Added new MIP seed with "+newSeed.getCalorimeterHits().size()+" hits for a track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
+			} else {
+			    leftoverHitClusters.add(newSeed);
+			    System.out.println("DEBUG: Added new leftoverHitCluster seed with "+newSeed.getCalorimeterHits().size()+" hits for a track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
+			}
+		    }
+		}
+	    }
+	    System.out.print("DEBUG: AFTER:  ");
+	    countHits(mipsOld, "mipsOld");
+	    countHits(mipsNew, "mipsNew");
+	    countHits(mips, "mips");
+	    countHits(clumps, "clumps");
+	    countHits(leftoverHitClusters, "leftoverHitClusters");
+	    countHits(modifiedPhotonClusters, "modifiedPhotonClusters");
+	    System.out.println("");
+	    printTracks(tweakedTracksMatchedToClusters);
+	}
+
+
+
 	// Sort matched tracks by momentum
 	List<Track> tracksSortedByMomentum = new Vector<Track>();
 	for (Track tr : tweakedTracksMatchedToClusters.keySet()) {
 	    tracksSortedByMomentum.add(tr);
 	}
 	Collections.sort(tracksSortedByMomentum, new MomentumSort());
+	Set<Cluster> seeds = clustersMatchedToTracks.keySet();
 
 	if (m_debug) {
 	    debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracks, seeds, tracksSortedByMomentum, tweakedTracksMatchedToClusters);
@@ -3695,7 +4301,20 @@
     void clusteringIteration(List<Track> tracksSortedByMomentum, Map<Track, Cluster> tweakedTracksMatchedToClusters, List<Track> electronTracks, Map<Track, Set<Cluster>> newMapTrackToShowerComponents, Map<Cluster, Track> newMapShowerComponentToTrack, Map<Track, Set<Cluster>> newMapTrackToVetoedAdditions, Map<Track, Set<Cluster>> mapTrackToVetoedAdditionsDueToTrackSeed, List<SharedClusterGroup> allSharedClusters, Map<Track,Double> newMapTrackToThreshold, Map<Track,Double> newMapTrackToTolerance) {
 	// Pop the seeds in first:
 	for (Track tr : tracksSortedByMomentum) {
+	    if (tr == null) { throw new AssertionError("Null track!"); }
 	    Cluster seed = tweakedTracksMatchedToClusters.get(tr);
+	    if (seed == null) { 
+		String printme = new String("ERROR: Track has null seed!\n");
+		if (tr instanceof MultipleTrackTrack) {
+		    printme += "Track is a MultipleTrackTrack with "+tr.getTracks().size()+" sub-tracks:\n";
+		    for (Track subtr : tr.getTracks()) {
+			printme += "  * p="+((new BasicHep3Vector(subtr.getMomentum())).magnitude())+"\n";
+		    }
+		} else {
+		    printme += "Track has p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+"\n";
+		}
+		throw new AssertionError(printme);
+	    }
 	    newMapShowerComponentToTrack.put(seed, tr);
 	}
 	// Build clusters:
@@ -4292,5 +4911,34 @@
 	    clustersMatchedToTweakedTracks.put(clus, tr);
 	}
     }
+
+    /** Debug helper: prints "&lt;n&gt; &lt;name&gt;--&gt;&lt;hits&gt; hits; " (no newline) for a cluster collection. */
+    private void countHits(Collection<Cluster> clusters, String name) {
+	// Sum the calorimeter hits over every cluster in the collection.
+	int totalHits = 0;
+	for (Cluster cluster : clusters) {
+	    totalHits += cluster.getCalorimeterHits().size();
+	}
+	// Emit a single fragment so several counts can share one output line.
+	System.out.print(clusters.size()+" "+name+"-->"+totalHits+" hits; ");
+    }
+    /**
+     * Debug helper: prints one line per tweaked track (momentum and matched
+     * seed-cluster size), then one line per MultipleTrackTrack listing the
+     * momenta of its sub-tracks.
+     *
+     * @param tweakedTracksMatchedToClusters map from tweaked track to its seed cluster
+     */
+    private void printTracks(Map<Track, Cluster> tweakedTracksMatchedToClusters) {
+	// First pass: summary line for every tweaked track.
+	for (Map.Entry<Track, Cluster> entry : tweakedTracksMatchedToClusters.entrySet()) {
+	    Track tr = entry.getKey();
+	    double p = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+	    Cluster seed = entry.getValue();
+	    System.out.println(" * Tweaked track with p="+p+" matched to seed with "+seed.getCalorimeterHits().size()+" hits.");
+	}
+	// Second pass: expand composite tracks into their sub-tracks.
+	for (Track tr : tweakedTracksMatchedToClusters.keySet()) {
+	    if (tr instanceof MultipleTrackTrack) {
+		List<Track> subtracks = tr.getTracks();
+		double p = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		System.out.print(" * MultipleTrackTrack with p="+p+" has "+subtracks.size()+" subtracks: ");
+		for (Track subtr : subtracks) {
+		    double sub_p = (new BasicHep3Vector(subtr.getMomentum())).magnitude();
+		    System.out.print("p="+sub_p+"  ");
+		}
+		// Bug fix: terminate the line -- the original used only print(),
+		// so the next debug output was appended to the same line.
+		System.out.println();
+	    }
+	}
+    }
 }
 
CVSspam 0.2.8