Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDTreeDriverTimeCut.java -819 1.2 removed
MJC: (contrib) Remove obsolete file

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriverTimeCut.java removed after 1.2
diff -N ReclusterDTreeDriverTimeCut.java
--- ReclusterDTreeDriverTimeCut.java	5 Jun 2008 16:58:49 -0000	1.2
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,819 +0,0 @@
-package org.lcsim.contrib.uiowa;
-
-import java.util.*; 
-import java.io.IOException; 
-import hep.physics.vec.*;
-import hep.physics.particle.properties.*;
-import org.lcsim.util.*;
-import org.lcsim.util.hitmap.*;
-import org.lcsim.event.*;
-import org.lcsim.event.util.*;
-import org.lcsim.recon.cluster.mipfinder.*;
-import org.lcsim.recon.cluster.util.*;
-import org.lcsim.recon.cluster.clumpfinder.*;
-import org.lcsim.recon.pfa.identifier.*;
-import org.lcsim.mc.fast.tracking.ReconTrack;
-import org.lcsim.event.base.*;
-import hep.physics.particle.Particle;
-import org.lcsim.recon.cluster.structural.likelihood.*;
-import org.lcsim.recon.cluster.structural.*;
-import org.lcsim.util.swim.Line;
-import org.lcsim.geometry.Detector;
-import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
-import org.lcsim.geometry.*;
-
-public class ReclusterDTreeDriverTimeCut extends ReclusterDTreeDriver {
-
-    public ReclusterDTreeDriverTimeCut(String dTreeClusterList, String trackList, String mcList) {
-	super(dTreeClusterList, trackList, mcList);
-	m_outputParticleListName = "DTreeReclusteredTimeCutParticles";
-	m_allowComponentsToStraddleLargeClusters = true;
-	m_writeExtraEventOutput = false;
-    }
-
-    public void process(EventHeader event) {
-	super.debugProcess(event);
-	m_event = event;
-
-	// Read in
-	List<Cluster> dTreeClusters = event.get(Cluster.class, m_dTreeClusterListName);
-	List<Cluster> dTreeClustersTimeCut = event.get(Cluster.class, "HcalDTreeClustersTimeCut");
-	List<Track> trackList = event.get(Track.class, m_inputTrackList);
-	List<Cluster> photons = event.get(Cluster.class, "PhotonClustersForDTree");
-	List<Cluster> largeClusters = dTreeClusters; // FIXME: NOT IDEAL! Perhaps run MST on DTree clusters?
-	if (trackList == null) { throw new AssertionError("Null track list!"); }
-	if (trackList.contains(null)) { throw new AssertionError("Track list contains null!"); }	
-
-	List<CalorimeterHit> allHitsEcalBarrel = m_event.get(CalorimeterHit.class, "EcalBarrDigiHits");
-	List<CalorimeterHit> allHitsEcalEndcap = m_event.get(CalorimeterHit.class, "EcalEndcapDigiHits");
-	List<CalorimeterHit> allHitsHcalBarrel = m_event.get(CalorimeterHit.class, "HcalBarrDigiHits");
-	List<CalorimeterHit> allHitsHcalEndcap = m_event.get(CalorimeterHit.class, "HcalEndcapDigiHits");
-
-	Set<CalorimeterHit> allHits = new HashSet<CalorimeterHit>();
-	allHits.addAll(allHitsEcalBarrel);
-	allHits.addAll(allHitsEcalEndcap);
-	allHits.addAll(allHitsHcalBarrel);
-	allHits.addAll(allHitsHcalEndcap);
-
-	List<Cluster> dTreeClusters_EcalBarrel = event.get(Cluster.class, "EcalBarrDTrees");
-	List<Cluster> dTreeClusters_EcalEndcap = event.get(Cluster.class, "EcalEndcapDTrees");
-	List<Cluster> dTreeClusters_HcalBarrel = event.get(Cluster.class, "HcalBarrDTrees");
-	List<Cluster> dTreeClusters_HcalEndcap = event.get(Cluster.class, "HcalEndcapDTrees");
-	Set<Cluster> dTreeClusters_Ecal = new HashSet<Cluster>();
-	Set<Cluster> dTreeClusters_Hcal = new HashSet<Cluster>();
-	dTreeClusters_Ecal.addAll(dTreeClusters_EcalBarrel);
-	dTreeClusters_Ecal.addAll(dTreeClusters_EcalEndcap);
-	dTreeClusters_Hcal.addAll(dTreeClusters_HcalBarrel);
-	dTreeClusters_Hcal.addAll(dTreeClusters_HcalEndcap);
-	if (dTreeClusters_Ecal.size() + dTreeClusters_Hcal.size() != dTreeClusters.size()) { throw new AssertionError("Book-keeping error"); }
-
-	for (Cluster photon : photons) {
-	    if (photon.getCalorimeterHits().contains(null)) {
-		throw new AssertionError("photon contains null hit");
-	    }
-	}
-
-	// Lists of subclusters
-	List<Cluster> leftoverHitClusters = new Vector<Cluster>();
-	// Make maps (from subclusters to tree)
-	Map<Cluster, Cluster> treeOfMip = new HashMap<Cluster, Cluster>();
-	Map<Cluster, Cluster> treeOfClump = new HashMap<Cluster,Cluster>();
-	Map<Cluster, Cluster> treeOfLeftoverHits = new HashMap<Cluster,Cluster>();
-	List<Cluster> treesWithNoStructure = new Vector<Cluster>();
-
-	// Book-keeping: What hits are we free to cluster? (Photon hits excluded)
-	List<CalorimeterHit> availableHitsECAL = new Vector<CalorimeterHit>();
-	List<CalorimeterHit> availableHitsHCAL = new Vector<CalorimeterHit>();
-	for (Cluster clus : dTreeClusters_Ecal) {
-	    availableHitsECAL.addAll(clus.getCalorimeterHits());
-	}
-	for (Cluster clus : dTreeClusters_Hcal) {
-	    availableHitsHCAL.addAll(clus.getCalorimeterHits());
-	}
-
-	// First, look for MIPs in whole event, irrespective of timing.
-	// The "oldMipFinder" can be run all all events, but the new one
-	// can only be run from within a DTree cluster due to the combinatorics.
-
-	// Start by scanning whole event with old mip finder:
-	List<Cluster> mipsOld = new Vector<Cluster>();
-	Clusterer oldMipFinder = new TrackClusterDriver();
-	List<Cluster> mipsOldEcal = oldMipFinder.createClusters(availableHitsECAL);
-	List<Cluster> mipsOldHcal = oldMipFinder.createClusters(availableHitsHCAL);
-	// Check quality & back out any bad MIPs:
-	if (m_removePoorQualityMips) {
-	    removePoorQualityMips(mipsOldEcal);
-	    removePoorQualityMips(mipsOldHcal);
-	}
-	mipsOld.addAll(mipsOldEcal);
-	mipsOld.addAll(mipsOldHcal);
-	Set<CalorimeterHit> mipHitsOld = new HashSet<CalorimeterHit>();
-	for (Cluster mip : mipsOld) {
-	    mipHitsOld.addAll(mip.getCalorimeterHits());
-	}
-
-	// For cluster sharing book-keeping:
-	Map<Cluster,Cluster> treeOfSharedCluster = new HashMap<Cluster,Cluster>();
-	Map<Cluster,List<Cluster>> targetsInTree = new HashMap<Cluster,List<Cluster>>();
-
-	// Now look for mips & clumps inside DTreeClusters, still with no time cut:
-	List<Cluster> mipsNew = new Vector<Cluster>();
-	Clusterer newMipFinderECAL = new org.lcsim.recon.cluster.mipfinder.NonProjectiveMipFinder(m_newMipFinderRadiusECAL);
-	Clusterer newMipFinderHCAL = new org.lcsim.recon.cluster.mipfinder.NonProjectiveMipFinder(m_newMipFinderRadiusHCAL);
-	List<Cluster> clumps = new Vector<Cluster>();
-	Clusterer clumpFinder = new ClumpFinder();
-	for (Cluster dTreeCluster : dTreeClusters) {
-	    // Book-keeping
-	    Set<CalorimeterHit> allHitsInTree = new HashSet<CalorimeterHit>();
-	    allHitsInTree.addAll(dTreeCluster.getCalorimeterHits());
-	    List<Cluster> targetList = new Vector<Cluster>();
-	    targetsInTree.put(dTreeCluster, targetList);
-	    // Are we in the ECAL or the HCAL?
-	    boolean treeInECAL = (dTreeClusters_Ecal.contains(dTreeCluster));
-	    boolean treeInHCAL = (dTreeClusters_Hcal.contains(dTreeCluster));
-	    if ( !(treeInECAL || treeInHCAL) || (treeInECAL && treeInHCAL) ) { throw new AssertionError("Book-keeping error"); }
-	    // Prepare hitmap, initially holding all the hits in the cluster except those already used to make MIPs:
-	    List<CalorimeterHit> unassignedHitsInTree = new Vector<CalorimeterHit>();
-	    unassignedHitsInTree.addAll(dTreeCluster.getCalorimeterHits());
-	    unassignedHitsInTree.removeAll(mipHitsOld);
-	    // Look to see which old MIPs are found in this cluster:
-	    List<Cluster> oldMipsInThisTree = new Vector<Cluster>();
-	    for (Cluster mip : mipsOld) {
-		Set<CalorimeterHit> overlappingHitsThisMIP = new HashSet<CalorimeterHit>();
-		for (CalorimeterHit mipHit : mip.getCalorimeterHits()) {
-		    if (allHitsInTree.contains(mipHit)) {
-			overlappingHitsThisMIP.add(mipHit);
-		    }
-		}
-		if (overlappingHitsThisMIP.size()>0) {
-		    oldMipsInThisTree.add(mip);
-		}
-	    }
-	    // Look for MIPs again (new mip finder)
-	    List<Cluster> mipClustersNewInTree;
-	    if (treeInECAL) {
-		mipClustersNewInTree = newMipFinderECAL.createClusters(unassignedHitsInTree);
-	    } else {
-		mipClustersNewInTree = newMipFinderHCAL.createClusters(unassignedHitsInTree);
-	    }
-	    if (m_removePoorQualityMips) { 
-		removePoorQualityMips(mipClustersNewInTree);
-	    }
-	    mipsNew.addAll(mipClustersNewInTree);
-	    for (Cluster mip : mipClustersNewInTree) {
-		unassignedHitsInTree.removeAll(mip.getCalorimeterHits());
-	    }
-	    // Now look for clumps:
-	    List<Cluster> clumpClustersInTree = clumpFinder.createClusters(unassignedHitsInTree);
-	    clumps.addAll(clumpClustersInTree);
-	    for (Cluster clump : clumpClustersInTree) {
-		unassignedHitsInTree.removeAll(clump.getCalorimeterHits());
-	    }
-
-	    // Hit sharing book-keeping
-	    targetList.addAll(oldMipsInThisTree);
-	    targetList.addAll(mipClustersNewInTree);
-	    targetList.addAll(clumpClustersInTree);
-
-	    // For ECAL trees, go ahead and check for structure now, since we won't take 
-	    // another pass with a time cut. (For HCAL trees, we have to wait.)
-	    if (treeInECAL) {
-		// Scan for old MIPs in this tree:
-		boolean foundLargeMipPiece = false;
-		for (Cluster mip : oldMipsInThisTree) {
-		    Set<CalorimeterHit> overlappingHitsThisMIP = new HashSet<CalorimeterHit>();
-		    for (CalorimeterHit mipHit : mip.getCalorimeterHits()) {
-			if (allHitsInTree.contains(mipHit)) {
-			    overlappingHitsThisMIP.add(mipHit);
-			}
-		    }
-		    if (overlappingHitsThisMIP.size()==0) {
-			throw new AssertionError("Internal consistency failure");
-		    }
-		    // Count as structure found if at least four MIP hits and/or at least half of MIP found
-		    boolean foundFourHits = (overlappingHitsThisMIP.size() >= 4);
-		    boolean foundHalfMip  = (overlappingHitsThisMIP.size() * 2 >= mip.getCalorimeterHits().size());
-		    if (foundHalfMip || foundFourHits) {
-			if (overlappingHitsThisMIP.size()==0) { throw new AssertionError("Empty MIP overlap found"); }
-			foundLargeMipPiece = true;
-		    }
-		    if (overlappingHitsThisMIP.size()>0) {
-			BasicCluster mipPieceFound = new BasicCluster();
-			for (CalorimeterHit mipHit : overlappingHitsThisMIP) {
-			    mipPieceFound.addHit(mipHit);
-			}
-		    }
-		}
-
-		// Separate out the found structure and the rest of the hits:
-		BasicCluster treeMinusStructure = new BasicCluster();
-		for (CalorimeterHit hit : unassignedHitsInTree) {
-		    treeMinusStructure.addHit(hit);
-		}	       
-
-		// OK. Now, for the case that:
-		//   1) The tree cluster is large, AND
-		//   2) We found no structure
-		// we'll go ahead and assign the whole thing as a block.
-		boolean treeIsLarge = (allHitsInTree.size() >= m_minHitsToBeTreatedAsClusterECAL);
-		boolean noStructureFound = (clumpClustersInTree.size()==0 && mipClustersNewInTree.size()==0 && !foundLargeMipPiece);
-		if (treeIsLarge && noStructureFound) {
-		    // Assign the whole thing as a block
-		    if (treeMinusStructure.getCalorimeterHits().size()>0) {
-			treesWithNoStructure.add(treeMinusStructure);
-			targetList.add(treeMinusStructure);
-		    }
-		} else {
-		    // Share remaining hits
-		    if (treeMinusStructure.getCalorimeterHits().size()>0) {
-			leftoverHitClusters.add(treeMinusStructure);
-			treeOfSharedCluster.put(treeMinusStructure, dTreeCluster);
-		    }
-		}
-	    }
-	}
-
-	// Book-keeping
-	List<Cluster> mips = new Vector<Cluster>();
-	mips.addAll(mipsOld);
-	mips.addAll(mipsNew);
-	Set<CalorimeterHit> mipHits = new HashSet<CalorimeterHit>();
-	Set<CalorimeterHit> clumpHits = new HashSet<CalorimeterHit>();
-	for (Cluster clus : mips  ) { mipHits  .addAll(clus.getCalorimeterHits()); }
-	for (Cluster clus : clumps) { clumpHits.addAll(clus.getCalorimeterHits()); }
-
-	// OK. Now look for other structure in the HCAL with a time cut
-	for (Cluster dTree : dTreeClustersTimeCut) {
-	    // What structure did we find inside this tree?
-	    Set<CalorimeterHit> allHitsInTree = new HashSet<CalorimeterHit>();
-	    allHitsInTree.addAll(dTree.getCalorimeterHits());
-	    Set<CalorimeterHit> clumpHitsInTree = new HashSet<CalorimeterHit>();
-	    for (CalorimeterHit hit : allHitsInTree) {
-		if (clumpHits.contains(hit)) {
-		    clumpHitsInTree.add(hit);
-		}
-	    }
-	    Set<CalorimeterHit> mipHitsInTree = new HashSet<CalorimeterHit>();
-	    for (CalorimeterHit hit : allHitsInTree) {
-		if (mipHits.contains(hit)) {
-		    mipHitsInTree.add(hit);
-		}
-	    }
-
-	    // Look for cases where:
-	    //   * The cluster is not small
-	    //   * A significant fraction of hits are neither MIP nor clump
-	    // ... in those cases, we'll take the rest of the cluster as a block.
-	    // [Should we run something like MST on it to be sure?]
-	    //
-	    // For now, codify the criteria as:
-	    //   * At least 10 unused hits
-	    //   * At least 2/3 of hits are unused
-	    List<CalorimeterHit> unusedHitsInTree = new Vector<CalorimeterHit>();
-	    for (CalorimeterHit hit : allHitsInTree) {
-		if (!mipHitsInTree.contains(hit) && !clumpHitsInTree.contains(hit)) {
-		    unusedHitsInTree.add(hit);
-		}
-	    }
-	    int countUnusedHits = unusedHitsInTree.size();
-	    boolean allHitsUnused = (countUnusedHits == allHitsInTree.size());
-	    boolean atLeastTenUnusedHits = (countUnusedHits >= 10);
-	    boolean atLeastTwoThirdsUnused = (countUnusedHits * 3 >= allHitsInTree.size() * 2);
-	    if (atLeastTenUnusedHits && atLeastTwoThirdsUnused) {
-		// Look for blocks among the rest of the hits
-		ClumpFinder localClumpFinder = new ClumpFinder();
-		localClumpFinder.setDensityThresholds(4,12); // looser than usual
-		List<Cluster> blocksFound = localClumpFinder.createClusters(unusedHitsInTree);
-		if (blocksFound.size()==0 && allHitsUnused) {
-		    // Special case: didn't find any structure or blocks.
-		    // Take whole thing
-		    BasicCluster block = new BasicCluster();
-		    for (CalorimeterHit hit : unusedHitsInTree) {
-			block.addHit(hit);
-		    }
-		    blocksFound.add(block);
-		}
-		for (Cluster block : blocksFound) {
-		    if (block.getCalorimeterHits().size() > 8) {
-			// FIXME: Don't hardcode cut of 8 hits
-			treesWithNoStructure.add(block);
-		    }
-		}
-	    }
-	}
-
-	// Book-keeping
-	Set<CalorimeterHit> blockHits = new HashSet<CalorimeterHit>();
-	for (Cluster clus : treesWithNoStructure) { blockHits.addAll(clus.getCalorimeterHits()); }
-
-	// OK. Finally, go back and scan HCAL over whole event. For DTreeClusters:
-	//   * Any with NO structure and at least N hits are assigned as blocks
-	//   * Any with no structure but too few hits are shared as blocks
-	//   * Any with structure have remaining hits shared as DTree.
-	// Hmm. What should we do if tree is large and there is a teeny bit of structure?
-	// (e.g. a 100 hit cluster, of which 2 from a MIP)
-	for (Cluster dTreeCluster : dTreeClusters_Hcal) {
-	    List<CalorimeterHit> unassignedHitsInTree = new Vector<CalorimeterHit>();
-	    unassignedHitsInTree.addAll(dTreeCluster.getCalorimeterHits());
-	    unassignedHitsInTree.removeAll(mipHits);
-	    unassignedHitsInTree.removeAll(clumpHits);
-	    unassignedHitsInTree.removeAll(blockHits);
-	    boolean treeIsLarge = (dTreeCluster.getCalorimeterHits().size() >= m_minHitsToBeTreatedAsClusterHCAL);
-	    boolean noStructure = (unassignedHitsInTree.size() == dTreeCluster.getCalorimeterHits().size());
-	    if (treeIsLarge && noStructure) {
-		// Large cluster with no structure -- treat whole thing as a block
-		BasicCluster newBlock = new BasicCluster();
-		for (CalorimeterHit hit : dTreeCluster.getCalorimeterHits()) {
-		    newBlock.addHit(hit);
-		}
-		treesWithNoStructure.add(newBlock);
-		targetsInTree.get(dTreeCluster).add(newBlock); // Make sure target points to tree
-	    } else if (unassignedHitsInTree.size()>0) {
-		// There is structure and/or tree is small -- share hits (assuming any left)
-		BasicCluster treeMinusStructure = new BasicCluster();
-		for (CalorimeterHit hit : unassignedHitsInTree) {
-		    treeMinusStructure.addHit(hit);
-		}
-		leftoverHitClusters.add(treeMinusStructure);
-		treeOfSharedCluster.put(treeMinusStructure, dTreeCluster);
-		// Special: If we assigned some as a block earlier, need to update book-keeping for sharing
-		Set<CalorimeterHit> allHitsInTree = new HashSet<CalorimeterHit>();
-		allHitsInTree.addAll(dTreeCluster.getCalorimeterHits());
-		for (Cluster clus : treesWithNoStructure) {
-		    boolean blockInTree = false;
-		    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
-			if (allHitsInTree.contains(hit)) {
-			    blockInTree = true;
-			    break;
-			}
-		    }
-		    if (blockInTree) {
-			// This block is at least partially inside this tree => note that in book-keeping
-			targetsInTree.get(dTreeCluster).add(clus);
-		    }
-		}
-	    }
-	}
-
-	// Now, which points to what?
-
-	// OK! Back to the usual. From here on it's copy-paste coding unless otherwise marked.
-	// -----------------------------------------------------------------------------------
-
-	// Match tracks
-	Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
-	Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
-
-	// Handle photons
-	List<Cluster> electronClusters = new Vector<Cluster>();
-	List<Track>   electronTracks   = new Vector<Track>();
-	List<Cluster> chargedHadronLikePhotons = new Vector<Cluster>();
-	List<Cluster> modifiedPhotonClusters = new Vector<Cluster>();
-	List<Cluster> photonLikePhotons = new Vector<Cluster>();
-	photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, trackList, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
-
-	// Resume track matching
-	List<Cluster> allMatchableClusters = new Vector<Cluster>();
-	allMatchableClusters.addAll(mips);
-	allMatchableClusters.addAll(clumps);
-	if (m_allowPhotonSeeds) {
-	    allMatchableClusters.addAll(chargedHadronLikePhotons);
-	}
-	allMatchableClusters.addAll(leftoverHitClusters);
-	allMatchableClusters.addAll(treesWithNoStructure);
-	if (allMatchableClusters.contains(null)) { throw new AssertionError("Book-keeping error: null cluster in list allMatchableClusters"); }
-
-	if (m_debug) { System.out.println("Attempting to match "+allMatchableClusters.size()+" matchable clusters to "+trackList.size()+" tracks"); }
-	for (Track tr : trackList) {
-	    if (electronTracks.contains(tr)) {
-		continue; // Those are already assigned!
-	    }
-	    Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, allMatchableClusters);
-	    if (matchedCluster != null) {
-		tracksMatchedToClusters.put(tr, matchedCluster);
-		List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
-		if (clusTrList == null) { 
-		    clusTrList = new Vector<Track>(); 
-		    clustersMatchedToTracks.put(matchedCluster, clusTrList); 
-		}
-		clusTrList.add(tr);
-	    }
-	}
-
-	// Optionally, split photon seeds
-	List<Cluster> photonFragments = new Vector<Cluster>();
-	if (m_splitPhotonSeeds) {
-	    splitPhotonSeeds(clustersMatchedToTracks, tracksMatchedToClusters, modifiedPhotonClusters, electronClusters, photonLikePhotons, chargedHadronLikePhotons, photonFragments, mipsOld, mipsNew, mips, clumps, treeOfMip, treeOfClump, treeOfLeftoverHits, treesWithNoStructure);
-	}
-	// Unmatched tracks
-	List<Track> unmatchedTracks = new Vector<Track>();
-	unmatchedTracks.addAll(trackList);
-	unmatchedTracks.removeAll(tracksMatchedToClusters.keySet());
-	List<Track> unmatchedTracksThatDontReachCalorimeter = new Vector<Track>();
-	for (Track tr : unmatchedTracks) {
-	    LocalHelixExtrapolationTrackClusterMatcher debugTrackMatch = new LocalHelixExtrapolationTrackClusterMatcher();
-	    debugTrackMatch.process(m_event);
-	    Cluster debugMatchedCluster = debugTrackMatch.matchTrackToCluster(tr, allMatchableClusters);
-	    if (debugMatchedCluster != null) {
-		// This can happen sometimes if it pointed to a photon that was broken up severely.
-		// In any case, it clearly pointed to the calorimeter so we shouldn't
-		// add on the track momentum (that would be double-counting)
-	    } else {
-		Hep3Vector interceptPoint = debugTrackMatch.getExtrapolator().extendToECALLayer(0);
-		if (interceptPoint == null) {
-		    // No valid extrap to calorimeter
-		    unmatchedTracksThatDontReachCalorimeter.add(tr);
-		} else {
-		    // Reached calorimeter, but no match => we'll treat as NH or similar
-		}
-	    }
-	}
-
-	// Figure out whether tracks were uniquely matched or not:
-	Set<Track> uniquelyMatchedTracks = new HashSet<Track>();
-	Set<Track> ambiguouslyMatchedTracks = new HashSet<Track>();
-	List<Set<Track>> ambiguouslyMatchedTrackGroups = new Vector<Set<Track>>();
-	for (Cluster clus : clustersMatchedToTracks.keySet()) {
-	    List<Track> tracksMatchedToThisCluster = clustersMatchedToTracks.get(clus);
-	    if (tracksMatchedToThisCluster.size() > 1) {
-		ambiguouslyMatchedTracks.addAll(tracksMatchedToThisCluster);
-		Set<Track> newSet = new HashSet<Track>();
-		newSet.addAll(tracksMatchedToThisCluster);
-		ambiguouslyMatchedTrackGroups.add(newSet);
-	    } else {
-		uniquelyMatchedTracks.addAll(tracksMatchedToThisCluster);
-	    }
-	}
-	if (uniquelyMatchedTracks.size() + ambiguouslyMatchedTracks.size() != tracksMatchedToClusters.keySet().size()) { throw new AssertionError("Book-keeping error: "+uniquelyMatchedTracks.size()+" + "+ambiguouslyMatchedTracks.size()+" != "+tracksMatchedToClusters.keySet().size()); }
-
-	// Redo ambiguous tracks hackishly:
-	// Similarly, redo track -> cluster map to use tweaked tracks
-	Map<Track, Cluster> tweakedTracksMatchedToClusters = new HashMap<Track,Cluster>();
-	List<Track> tweakedTracks = new Vector<Track>();
-	Map<Track, Track> mapOrigTrackToTweakedTrack = new HashMap<Track,Track>();
-	tweakedTracks.addAll(uniquelyMatchedTracks);
-	for (Track tr : uniquelyMatchedTracks) {
-	    mapOrigTrackToTweakedTrack.put(tr,tr);
-	}
-	for (Set<Track> trackSet : ambiguouslyMatchedTrackGroups) {
-	    MultipleTrackTrack tweakedTrack = new MultipleTrackTrack(trackSet);
-	    tweakedTracks.add(tweakedTrack);
-	    for (Track origTrack : trackSet) {
-		Cluster clus = tracksMatchedToClusters.get(origTrack);
-		tweakedTracksMatchedToClusters.put(tweakedTrack, clus); // over-writes sometimes, but that's OK.
-		mapOrigTrackToTweakedTrack.put(origTrack, tweakedTrack);
-	    }
-	}
-	for (Track tr : uniquelyMatchedTracks) {
-	    Cluster clus = tracksMatchedToClusters.get(tr);
-	    tweakedTracksMatchedToClusters.put(tr, clus);
-	}
-
-	// Track seeds
-	Set<Cluster> seeds = clustersMatchedToTracks.keySet();
-	List<Cluster> seedLeftoverHitClusters = new Vector<Cluster>();
-	List<Cluster> nonSeedLeftoverHitClusters = new Vector<Cluster>();
-	for (Cluster clus : leftoverHitClusters) {
-	    if (seeds.contains(clus) ) {
-		seedLeftoverHitClusters.add(clus);
-	    } else {
-		nonSeedLeftoverHitClusters.add(clus);
-	    }
-	}
-	if ( seedLeftoverHitClusters.size() + nonSeedLeftoverHitClusters.size()  != leftoverHitClusters.size() ) { throw new AssertionError("Book-keeping error"); }
-
-	List<Cluster> seedElectronClusters = new Vector<Cluster>();
-	List<Cluster> seedHadronLikePhotonClusters = new Vector<Cluster>();
-	List<Cluster> nonSeedHadronLikePhotonClusters = new Vector<Cluster>();
-	List<Cluster> nonSeedPhotonLikePhotonClusters = new Vector<Cluster>();
-	for (Cluster clus : electronClusters) {
-	    if (seeds.contains(clus)) {
-		seedElectronClusters.add(clus);
-	    } else {
-		throw new AssertionError("Electron cluster is not a seed!");
-	    }
-	}
-	for (Cluster clus : photonLikePhotons) {
-	    if (seeds.contains(clus)) {
-		throw new AssertionError("Photon cluster is a seed!");
-	    } else {
-		nonSeedPhotonLikePhotonClusters.add(clus);
-	    }
-	}
-	for (Cluster clus : chargedHadronLikePhotons) {
-	    if (seeds.contains(clus)) {
-		seedHadronLikePhotonClusters.add(clus);
-	    } else {
-		nonSeedHadronLikePhotonClusters.add(clus);
-	    }
-	}
-	if (seedElectronClusters.size() + seedHadronLikePhotonClusters.size() + nonSeedHadronLikePhotonClusters.size() + nonSeedPhotonLikePhotonClusters.size() != modifiedPhotonClusters.size() ) { throw new AssertionError("Book-keeping error"); }
-
-	// Sort matched tracks by momentum
-	List<Track> tracksSortedByMomentum = new Vector<Track>();
-	for (Track tr : tweakedTracksMatchedToClusters.keySet()) {
-	    tracksSortedByMomentum.add(tr);
-	}
-	Collections.sort(tracksSortedByMomentum, new MomentumSort());
-
-	// Prep for linking
-	List<Cluster> linkableClusters = new Vector<Cluster>();
-	List<Cluster> smallClustersToShare = new Vector<Cluster>();
-	List<Cluster> leftoverHitClustersToShare = new Vector<Cluster>();
-	List<Cluster> leftoverHitClustersAllowedInShowers = new Vector<Cluster>();
-	linkableClusters.addAll(mips);
-	linkableClusters.addAll(modifiedPhotonClusters);
-	linkableClusters.addAll(clumps);
-	linkableClusters.addAll(treesWithNoStructure);
-	for (Cluster clus : leftoverHitClusters) {
-	    if (seeds.contains(clus)) {
-		linkableClusters.add(clus);
-	    } else if (clus.getCalorimeterHits().size() < 3) {
-		smallClustersToShare.add(clus);
-	    } else {
-		leftoverHitClustersToShare.add(clus);
-	    }
-	}
-	smallClustersToShare.addAll(photonFragments);
-
-	// DEBUG
-	{
-	    // Fudge MIPs to make them visible...
-	    List<Cluster> fudged_mips = new Vector<Cluster>();
-	    List<Cluster> fudged_mips_old = new Vector<Cluster>();
-	    List<Cluster> fudged_mips_new = new Vector<Cluster>();
-	    for (Cluster clus : mipsOld) {
-		BasicCluster newClus = new BasicCluster();
-		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
-		    newClus.addHit(hit);
-		}
-		fudged_mips_old.add(newClus);
-	    }
-	    for (Cluster clus : mipsNew) {
-		BasicCluster newClus = new BasicCluster();
-		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
-		    newClus.addHit(hit);
-		}
-		fudged_mips_new.add(newClus);
-	    }
-	    fudged_mips.addAll(fudged_mips_old);
-	    fudged_mips.addAll(fudged_mips_new);
-		
-	    m_event.put(m_outputParticleListName+"_MM_mips", fudged_mips);
-	    m_event.put(m_outputParticleListName+"_MM_mipsOld", fudged_mips_old);
-	    m_event.put(m_outputParticleListName+"_MM_mipsNew", fudged_mips_new);
-
-	    m_event.put(m_outputParticleListName+"_MM_clumps", clumps);
-	    m_event.put(m_outputParticleListName+"_MM_blocks", treesWithNoStructure);
-	    m_event.put(m_outputParticleListName+"_MM_photons", modifiedPhotonClusters);
-	    m_event.put(m_outputParticleListName+"_MM_shared1", leftoverHitClustersToShare);
-	    m_event.put(m_outputParticleListName+"_MM_shared2", smallClustersToShare);
-	}
-
-	// Initially, cheat
-	resetPotentialLinks();
-	if (m_cheatOnScoring) {
-	    initPotentialLinks_cheating(linkableClusters, clustersMatchedToTracks);
-	} else {
-	    double scaleFactorTrackToTrack = 1.0;
-	    double scaleFactorTrackToClump = 1.0;
-	    double scaleFactorTrackToSmallSeed = 1.0;
-	    double thresholdForProximity = 50.0; // 5cm. FIXME: Don't hard-code.
-	    double thresholdForProximityClump = 75.0; // FIXME: Don't hard-code.
-	    double penaltyForNewMips = 1.0; // FIXME: Don't hard-code.
-	    initPotentialLinks_SkeletonsWithinLargeClus(largeClusters, mipsOld, mipsNew, clumps, scaleFactorTrackToTrack, scaleFactorTrackToClump, penaltyForNewMips);
-	    initPotentialLinks_MipMip(mipsOld, scaleFactorTrackToTrack, true);
-	    initPotentialLinks_MipMip(mipsOld, mipsNew, scaleFactorTrackToTrack*penaltyForNewMips, true);
-	    initPotentialLinks_MipMip(mipsNew, scaleFactorTrackToTrack*penaltyForNewMips*penaltyForNewMips, true);
-	    initPotentialLinks_MipClump(mipsOld, clumps, scaleFactorTrackToClump, true);
-	    initPotentialLinks_MipClump(mipsNew, clumps, scaleFactorTrackToClump*penaltyForNewMips, true);
-	    initPotentialLinks_MipMisc(mipsOld, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
-	    initPotentialLinks_MipMisc(mipsNew, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
-	    initPotentialLinks_MipMisc(mipsOld, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
-	    initPotentialLinks_MipMisc(mipsNew, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
-	    initPotentialLinks_MipMisc(mipsOld, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
-	    initPotentialLinks_MipMisc(mipsNew, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
-
-	    initPotentialLinks_MiscSelf(clumps, thresholdForProximityClump, "Clump", true);
-	    initPotentialLinks_MiscMisc(clumps, treesWithNoStructure, thresholdForProximity, "Clump", "LargeStructurelessTree");
-	    initPotentialLinks_MiscMisc(clumps, seedLeftoverHitClusters, thresholdForProximity, "Clump", "SmallSeed");
-
-	    initPotentialLinks_MiscMisc(seedLeftoverHitClusters, treesWithNoStructure, thresholdForProximity, "SmallSeed", "LargeStructurelessTree");
-
-	    initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
-	}
-
-	// Done making links. Prep & build skeletons:
-	sortLinks();
-
-	// Try to build skeletons, working with tracks in momentum-sorted order...
-	Map<Track,Double> newMapTrackToThreshold = new HashMap<Track,Double>(); // for geometrical checks
-	Map<Track,Double> newMapTrackToTolerance = new HashMap<Track,Double>(); // for E/p checks
-	for (Track tr : tracksSortedByMomentum) {
-	    newMapTrackToThreshold.put(tr, new Double(0.7));
-	    newMapTrackToTolerance.put(tr, new Double(1.0));
-	}
-	Map<Cluster, Track> newMapShowerComponentToTrack = null;
-	Map<Track, Set<Cluster>> newMapTrackToShowerComponents = null;
-	Map<Track, Set<Cluster>> newMapTrackToVetoedAdditions = null;
-
-	// Jet stuff
-	Map<Cluster, Set<Track>> newMapShowerComponentToJet = null;
-	Map<Set<Track>, Set<Cluster>> newMapJetToShowerComponents = null;
-	List<Set<Track>> jets = null;
-	Map<Track, Set<Track>> mapTrackToJet = null;
-
-	// Set up sharing
-	List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
-	ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
-	SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAlgForSmallClusters);
-	sharedSmallDTreeClusters.createShares(linkableClusters);
-	sharedSmallDTreeClusters.rebuildHints();
-	allSharedClusters.add(sharedSmallDTreeClusters);
-	DTreeClusterSharingAlgorithm dTreeSharingAlg = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, 150.0); // TEST
-	SharedClusterGroup sharedLeftoverHitClusters = new SharedClusterGroup(leftoverHitClustersToShare, dTreeSharingAlg);
-	sharedLeftoverHitClusters.createShares(linkableClusters);
-	sharedLeftoverHitClusters.rebuildHints();
-	allSharedClusters.add(sharedLeftoverHitClusters);
-
-	// Iterate to build clusters:
-	for (int iIter=0; iIter<10; iIter++) {
-	    newMapShowerComponentToTrack = new HashMap<Cluster, Track>();
-	    newMapTrackToShowerComponents = new HashMap<Track, Set<Cluster>>();
-	    newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
-	    Map<Track, Set<Cluster>> mapTrackToVetoedAdditionsDueToTrackSeed = new HashMap<Track, Set<Cluster>>();
-
-	    clusteringIteration(tracksSortedByMomentum, tweakedTracksMatchedToClusters, electronTracks, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, allSharedClusters, newMapTrackToThreshold, newMapTrackToTolerance);
-
-	    // Check on E/p, update scoring, make over-rides.
-	    Set<Track> tracksWithEoverPTooLow = new HashSet<Track>();
-	    Set<Track> tracksWithEoverPMuchTooLow = new HashSet<Track>();
-	    Set<Track> tracksWithVetoedLinkToUnusedCluster = new HashSet<Track>();
-	    List<Set<Track>> jetLinks = new Vector<Set<Track>>();
-	    
-	    updateScoring(uniquelyMatchedTracks, ambiguouslyMatchedTracks, tracksSortedByMomentum, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, tracksWithEoverPTooLow, tracksWithEoverPMuchTooLow, tracksWithVetoedLinkToUnusedCluster, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks, linkableClusters);
-
-	    lookForVetoedLinks(tracksSortedByMomentum, tracksWithEoverPMuchTooLow, newMapTrackToShowerComponents, newMapShowerComponentToTrack, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks);
-
-	    // Try to cluster as jet (experimental)
-	    newMapShowerComponentToJet = new HashMap<Cluster, Set<Track>>();
-	    newMapJetToShowerComponents = new HashMap<Set<Track>, Set<Cluster>>();
-	    jets = new Vector<Set<Track>>();
-	    jetClustering(newMapShowerComponentToJet, newMapJetToShowerComponents, jets, jetLinks, newMapTrackToShowerComponents, newMapShowerComponentToTrack, seeds, allSharedClusters);
-	    // For ease of book-keeping, another way of accessing the info:
-	    mapTrackToJet = new HashMap<Track, Set<Track>>();
-	    for (Set<Track> jet : jets) {
-		for (Track tr : jet) {
-		    mapTrackToJet.put(tr, jet);
-		}
-	    }
-
-	    // Adjust tolerance (how wrong in E/p we'll allow a cluster addition to make us)
-	    // and threshold (how low in score we'll go for adding a link)
-	    double maximumAllowedTolerance = 2.5;
-	    double minimumAllowedThreshold = 0.3;
-	    boolean stateChange = false;
-
-	    for (Track tr : tracksWithVetoedLinkToUnusedCluster) {
-		boolean ignoreTrackDueToJet = (m_clusterAsJets && mapTrackToJet.get(tr)!=null);
-		if (!ignoreTrackDueToJet) {
-		    Double oldTolerance = newMapTrackToTolerance.get(tr);
-		    double newTolerance = oldTolerance.doubleValue() + 0.25;
-		    if (newTolerance <= maximumAllowedTolerance) {
-			newMapTrackToTolerance.put(tr, newTolerance);
-			stateChange = true;
-		    }
-		}
-	    }
-	    for (Track tr : tracksWithEoverPTooLow) {
-		boolean ignoreTrackDueToJet = (m_clusterAsJets && mapTrackToJet.get(tr)!=null);
-		if (!ignoreTrackDueToJet) {
-		    Double oldThreshold = newMapTrackToThreshold.get(tr);
-		    double newThreshold = oldThreshold.doubleValue() - 0.05;
-		    if (newThreshold >= minimumAllowedThreshold) {
-			newMapTrackToThreshold.put(tr, new Double(newThreshold));
-			stateChange = true;
-		    }
-		}
-	    }
-
-	    if (!stateChange) {
-		// Reached steady state => stop
-		break;
-	    }
-	}
-
-	applyOverrides(linkableClusters, photonLikePhotons, electronClusters, tracksMatchedToClusters, newMapShowerComponentToTrack, newMapTrackToShowerComponents, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, newMapTrackToVetoedAdditions, newMapTrackToTolerance, allSharedClusters);
-
-        if (m_debug) {
-	    printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
-	}
-
-	// Outputs
-	makeParticlesAndWriteOut(trackList, tracksSortedByMomentum, unmatchedTracksThatDontReachCalorimeter, mapOrigTrackToTweakedTrack,
-				 tracksMatchedToClusters, clustersMatchedToTracks,
-				 electronTracks, modifiedPhotonClusters,electronClusters, photonLikePhotons, chargedHadronLikePhotons,
-				 seedHadronLikePhotonClusters, nonSeedHadronLikePhotonClusters, nonSeedPhotonLikePhotonClusters,
-				 newMapTrackToShowerComponents, newMapShowerComponentToTrack,
-				 linkableClusters,
-				 mips, clumps, treesWithNoStructure, seedLeftoverHitClusters,
-				 jets, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet,
-				 allSharedClusters, 
-				 smallClustersToShare, leftoverHitClustersToShare,
-				 allHits, allHitsEcalBarrel, allHitsEcalEndcap, allHitsHcalBarrel, allHitsHcalEndcap
-				 );
-	
-	m_event = null;
-	
-    }
-
-	/*
-	    for (Cluster mip : mipClustersOld) { treeOfMip.put(mip, dTreeCluster); }
-	    for (Cluster mip : mipClustersNew) { treeOfMip.put(mip, dTreeCluster); }
-	    for (Cluster clump : clumpClusters) { treeOfClump.put(clump, dTreeCluster); }
-	    int numMipsInTree = mipClustersNew.size() + mipClustersOld.size();
-	    int numClumpsInTree = clumpClusters.size();
-	    if (numMipsInTree==0 && numClumpsInTree==0 && hitsInTree.size()>=minHitsToBeTreatedAsCluster) {
-		// No structure found in tree. Treat it as a block.
-		treesWithNoStructure.add(dTreeCluster);
-	    } else {
-		// Structure found in tree. Rest are leftover hits:
-		if (hitsInTree.size() > 0) {
-		    List<CalorimeterHit> leftoverHits = new Vector<CalorimeterHit>(hitsInTree.values());
-		    if (leftoverHits.contains(null)) { throw new AssertionError("null hit in leftoverHits"); }
-		    BasicCluster leftoverHitCluster = new BasicCluster();
-		    for (CalorimeterHit hit : leftoverHits) {
-			leftoverHitCluster.addHit(hit);
-		    }
-		    leftoverHitClusters.add(leftoverHitCluster);
-		    treeOfLeftoverHits.put(leftoverHitCluster, dTreeCluster);
-		}
-	    }
-	}
-	Map<Cluster, Cluster> treeOfComponent = new HashMap<Cluster,Cluster>();
-	treeOfComponent.putAll(treeOfMip);
-	treeOfComponent.putAll(treeOfClump);
-	treeOfComponent.putAll(treeOfLeftoverHits);
-
-	if (m_debug) {
-	    System.out.println("Found "+mips.size()+" mips, "+clumps.size()+" clumps, "+photons.size()+" photons, "+leftoverHitClusters.size()+" leftover-hit-clusters, "+treesWithNoStructure.size()+" large DTrees with no structure, and "+trackList.size()+" tracks in event.");
-	}
-
-	if (m_debug) {
-	    System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
-	    for (Track tr : unmatchedTracks) {
-		System.out.println("  track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
-	    }
-	}
-
-	if (m_debug) {
-	    debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracks, seeds, tracksSortedByMomentum, tweakedTracksMatchedToClusters);
-	}
-
-	*/
-}
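
For reference, the block-assignment criterion codified in the removed driver
(at least 10 hits unassigned to MIPs or clumps, and those unused hits making
up at least two thirds of the tree) is self-contained enough to test in
isolation. A minimal sketch, with hypothetical names that are not part of the
lcsim API:

    // Illustrative sketch of the "take the rest of the tree as a block"
    // criterion from the removed driver: >= 10 unused hits AND unused
    // hits are >= 2/3 of all hits in the tree.
    public class BlockCriterionSketch {
        public static boolean takeRestAsBlock(int unusedHits, int totalHits) {
            boolean atLeastTenUnused = (unusedHits >= 10);
            // Integer arithmetic, as in the original: 3*unused >= 2*total
            boolean atLeastTwoThirdsUnused = (unusedHits * 3 >= totalHits * 2);
            return atLeastTenUnused && atLeastTwoThirdsUnused;
        }
    }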
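
Similarly, the clustering iteration reaches steady state by adjusting two
per-track settings each pass: the E/p tolerance is raised by 0.25 (capped at
2.5) for tracks with a vetoed link to an unused cluster, and the link-score
threshold is lowered by 0.05 (floored at 0.3) for tracks whose E/p is too low;
the loop stops once neither map changes. A minimal sketch of that control
flow, using a generic key type in place of Track and hypothetical names (not
the lcsim implementation); the jet-membership veto is omitted:

    import java.util.Map;
    import java.util.Set;

    public class RelaxationSketch<T> {
        private static final double MAX_TOLERANCE = 2.5;
        private static final double MIN_THRESHOLD = 0.3;

        // Returns true if any setting changed, i.e. another iteration is needed.
        public boolean relax(Set<T> tracksWithVetoedLink, Set<T> tracksWithLowEoverP,
                             Map<T, Double> tolerance, Map<T, Double> threshold) {
            boolean stateChange = false;
            for (T tr : tracksWithVetoedLink) {
                double t = tolerance.get(tr) + 0.25;
                if (t <= MAX_TOLERANCE) { tolerance.put(tr, t); stateChange = true; }
            }
            for (T tr : tracksWithLowEoverP) {
                double s = threshold.get(tr) - 0.05;
                if (s >= MIN_THRESHOLD) { threshold.put(tr, s); stateChange = true; }
            }
            return stateChange;
        }
    }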
CVSspam 0.2.8