Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDriver.java        +1   -26    1.17 -> 1.18
ReclusterDTreeDriver.java   +42  -216   1.13 -> 1.14
Total                       +43  -242
2 modified files
MJC: (contrib) Tidy up code a bit, remove some redundant/debug stuff, use MIPFinder from main tree rather than contrib

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.17 -> 1.18
diff -u -r1.17 -r1.18
--- ReclusterDriver.java	25 Mar 2008 22:46:01 -0000	1.17
+++ ReclusterDriver.java	26 Mar 2008 19:45:31 -0000	1.18
@@ -32,7 +32,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.17 2008/03/25 22:46:01 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.18 2008/03/26 19:45:31 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -1621,26 +1621,6 @@
 	return num/denom;
     }
     protected MCParticle quoteDominantParticle(Cluster clus) {
-// 	List<CalorimeterHit> hits = clus.getCalorimeterHits();
-// 	Map<MCParticle, List<SimCalorimeterHit>> truthMap = new HashMap<MCParticle, List<SimCalorimeterHit>>();
-// 	for (CalorimeterHit hit : hits) {
-// 	    SimCalorimeterHit simhit = (SimCalorimeterHit) (hit);
-// 	    int maxContrib = -1;
-// 	    for (int i=0; i<simhit.getMCParticleCount(); i++) {
-// 		MCParticle p = simhit.getMCParticle(i);
-// 		double e = simhit.getContributedEnergy(i);
-// 		if (maxContrib < 0 || e > simhit.getContributedEnergy(maxContrib)) {
-// 		    maxContrib = i;
-// 		}
-// 	    }
-// 	    MCParticle maxParticle = simhit.getMCParticle(maxContrib);
-// 	    List<SimCalorimeterHit> hitsOfThisParticle = truthMap.get(maxParticle);
-// 	    if (hitsOfThisParticle == null) {
-// 		hitsOfThisParticle = new Vector<SimCalorimeterHit>();
-// 		truthMap.put(maxParticle, hitsOfThisParticle);
-// 	    }
-// 	    hitsOfThisParticle.add(simhit);
-// 	}
 	Map<MCParticle, List<SimCalorimeterHit>> truthMap = truthFromCluster(clus);
 	MCParticle dominant = null;
 	for (MCParticle p : truthMap.keySet()) {
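
The block deleted above was a commented-out copy of the per-hit truth matching now done by the truthFromCluster helper called just below it. A plausible reconstruction of that helper, assuming it keeps the same dominant-contributor logic as the deleted comment block, is:

    protected Map<MCParticle, List<SimCalorimeterHit>> truthFromCluster(Cluster clus) {
        // Bucket each hit under the MCParticle that contributed the most energy to it.
        Map<MCParticle, List<SimCalorimeterHit>> truthMap = new HashMap<MCParticle, List<SimCalorimeterHit>>();
        for (CalorimeterHit hit : clus.getCalorimeterHits()) {
            SimCalorimeterHit simhit = (SimCalorimeterHit) hit;
            // Find the index of the largest energy contribution to this hit.
            int maxContrib = -1;
            for (int i = 0; i < simhit.getMCParticleCount(); i++) {
                if (maxContrib < 0 || simhit.getContributedEnergy(i) > simhit.getContributedEnergy(maxContrib)) {
                    maxContrib = i;
                }
            }
            MCParticle maxParticle = simhit.getMCParticle(maxContrib);
            List<SimCalorimeterHit> hitsOfThisParticle = truthMap.get(maxParticle);
            if (hitsOfThisParticle == null) {
                hitsOfThisParticle = new Vector<SimCalorimeterHit>();
                truthMap.put(maxParticle, hitsOfThisParticle);
            }
            hitsOfThisParticle.add(simhit); // mirrors the deleted inline code
        }
        return truthMap;
    }
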
@@ -1999,11 +1979,6 @@
 	return energy(mainCluster, listOfShares, m_chargedCalib);
     }
     double energy(Cluster mainCluster, List<SharedClusterGroup> listOfShares, ClusterEnergyCalculator calib) {
-// 	double sumDeltaEnergy = 0.0;
-// 	for (SharedClusterGroup share : listOfShares) {
-// 	    sumDeltaEnergy += deltaEnergy(mainCluster, share, calib);
-// 	}
-// 	return energy(mainCluster, calib) + sumDeltaEnergy;
 	List<FuzzyCalorimeterHit> allFuzzyHits = new Vector<FuzzyCalorimeterHit>();
 	for (SharedClusterGroup share : listOfShares) {
 	    List<FuzzyCalorimeterHit> fuzzyHitsOfThisShare = findFuzzyHitsForCluster(mainCluster, share);

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.13 -> 1.14
diff -u -r1.13 -r1.14
--- ReclusterDTreeDriver.java	25 Mar 2008 22:46:01 -0000	1.13
+++ ReclusterDTreeDriver.java	26 Mar 2008 19:45:31 -0000	1.14
@@ -2,7 +2,6 @@
 
 import java.util.*; 
 import java.io.IOException; 
-import hep.aida.*;
 import hep.physics.vec.*;
 import hep.physics.particle.properties.*;
 import org.lcsim.util.*;
@@ -25,6 +24,19 @@
 import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
 import org.lcsim.geometry.*;
 
+/**
+  * An algorithm to recluster showers using E/p information
+  * from tracks to constrain and direct the clustering.
+  * Also makes use of topological/geometric information.
+  * Takes as input a set of clusters created with the
+  * DirectedTree algorithm (see SetUpDTreeForReclustering
+  * in this package, which uses the implementation in
+  * org.lcsim.recon.cluster.directedtree developed by NIU).
+  *
+  * @version $Id: ReclusterDTreeDriver.java,v 1.14 2008/03/26 19:45:31 mcharles Exp $
+  * @author Mat Charles <[log in to unmask]>
+  */
+
 public class ReclusterDTreeDriver extends ReclusterDriver {
 
     protected String m_dTreeClusterListName;
@@ -36,6 +48,7 @@
     protected boolean m_photonDebug = false;
     protected boolean m_photonSplitDebug = false;
     protected boolean m_jetDebug = false;
+    protected boolean m_writeExtraEventOutput = false;
 
     protected boolean m_useNewMipFinder = true;
     protected boolean m_useOldMipFinder = true;
@@ -60,16 +73,8 @@
     protected int m_punchThroughLayers = 5;
     protected int m_punchThroughHitMinimum = 4;
 
-    protected ICloud1D m_histo_unmatchedTracksMomentum;
-    protected ICloud1D m_histo_unmatchedTracksCosTheta;
-    protected ICloud2D m_histo_unmatchedTracksMomentumVsCosTheta;
-    protected ICloud1D m_histo_unmatchedTracksMultiplicity;
-    protected ICloud1D m_histo_unmatchedTracksMomentumSum;
-
     public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList) {
-	m_debugEoverP = false;
-	m_debug = false;
-
+	System.out.println("ReclusterDTreeDriver version 0.1");
 	initTrackMatch();
 	initCalibration();
 	initPlots();
@@ -78,12 +83,6 @@
 	m_mcList = mcList;
 	m_eval = new LikelihoodEvaluatorWrapper();
 	m_outputParticleListName = "DTreeReclusteredParticles";
-	// More plots
-	m_histo_unmatchedTracksMomentum = m_histoFactory.createCloud1D("Unmatched track momentum");
-	m_histo_unmatchedTracksCosTheta = m_histoFactory.createCloud1D("Unmatched track cos(polar angle)");
-	m_histo_unmatchedTracksMomentumVsCosTheta = m_histoFactory.createCloud2D("Unmatched tracks: momentum vs cos(polar angle)");
-	m_histo_unmatchedTracksMultiplicity = m_histoFactory.createCloud1D("Number of unmatched tracks per event");
-	m_histo_unmatchedTracksMomentumSum = m_histoFactory.createCloud1D("Total |p| of unmatched tracks in event");
     }
 
     public void process(EventHeader event) {
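
A minimal usage sketch for the updated driver, assuming hypothetical input collection names ("DTreeClusters", "Tracks", "MCParticle"); only the photon input "PhotonClustersForDTree" and the output name "DTreeReclusteredParticles" are fixed by the code in this commit:

    import org.lcsim.util.Driver;
    import org.lcsim.contrib.uiowa.ReclusterDTreeDriver;

    public class RunDTreeRecluster extends Driver {
        public RunDTreeRecluster() {
            // Collection names are placeholders; substitute those produced by
            // SetUpDTreeForReclustering and the tracking/MC-truth drivers.
            add(new ReclusterDTreeDriver("DTreeClusters", "Tracks", "MCParticle"));
        }
    }
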
@@ -94,7 +93,6 @@
 	List<Cluster> dTreeClusters = event.get(Cluster.class, m_dTreeClusterListName);
 	List<Track> trackList = event.get(Track.class, m_inputTrackList);
 	List<Cluster> photons = event.get(Cluster.class, "PhotonClustersForDTree");
-	//List<Cluster> largeClusters = event.get(Cluster.class, "MSTClustersLinkedWithTenOrMoreHits");
 	List<Cluster> largeClusters = dTreeClusters; // FIXME: NOT IDEAL! Perhaps run MST on DTree clusters?
 	if (trackList == null) { throw new AssertionError("Null track list!"); }
 	if (trackList.contains(null)) { throw new AssertionError("Track list contains null!"); }	
@@ -128,22 +126,13 @@
 	    }
 	}
 
-	// Here are the clusterers:
-	//Clusterer clumpfinder = new ClumpFinder();
-	//Clusterer oldMipfinder = new TrackClusterDriver();
-	//Clusterer newMipFinder_ECAL = new TestNewMipFinder(20.0);
-	//Clusterer newMipFinder_HCAL = new TestNewMipFinder(50.0);
-
 	// Lists of subclusters
 	List<Cluster> mips = new Vector<Cluster>();
 	List<Cluster> mipsOld = new Vector<Cluster>();
 	List<Cluster> mipsNew = new Vector<Cluster>();
 	List<Cluster> clumps = new Vector<Cluster>();
 	List<Cluster> leftoverHitClusters = new Vector<Cluster>();
-	// Make two-way maps (from tree to contents, and from subclusters to tree)
-	//Map<Cluster, List<Cluster>>   mipsInTrees = new HashMap<Cluster, List<Cluster>>();
-	//Map<Cluster, List<Cluster>> clumpsInTrees = new HashMap<Cluster, List<Cluster>>();
-	//Map<Cluster, List<CalorimeterHit>> otherHitsInTrees = new HashMap<Cluster, List<CalorimeterHit>>();
+	// Make maps (from subclusters to tree)
 	Map<Cluster, Cluster> treeOfMip = new HashMap<Cluster, Cluster>();
 	Map<Cluster, Cluster> treeOfClump = new HashMap<Cluster,Cluster>();
 	Map<Cluster, Cluster> treeOfLeftoverHits = new HashMap<Cluster,Cluster>();
@@ -177,7 +166,6 @@
 	    int numMipsInTree = mipClustersNew.size() + mipClustersOld.size();
 	    int numClumpsInTree = clumpClusters.size();
 	    if (numMipsInTree==0 && numClumpsInTree==0 && hitsInTree.size()>=minHitsToBeTreatedAsCluster) {
-	    //if (mips.size()==0 && clumps.size()==0 && hitsInTree.size()>=minHitsToBeTreatedAsCluster) {
 		// No structure found in tree. Treat it as a block.
 		treesWithNoStructure.add(dTreeCluster);
 	    } else {
@@ -185,7 +173,6 @@
 		if (hitsInTree.size() > 0) {
 		    List<CalorimeterHit> leftoverHits = new Vector<CalorimeterHit>(hitsInTree.values());
 		    if (leftoverHits.contains(null)) { throw new AssertionError("null hit in leftoverHits"); }
-		    //otherHitsInTrees.put(dTreeCluster, leftoverHits);
 		    BasicCluster leftoverHitCluster = new BasicCluster();
 		    for (CalorimeterHit hit : leftoverHits) {
 			leftoverHitCluster.addHit(hit);
@@ -417,9 +404,6 @@
 	List<Cluster> allMatchableClusters = new Vector<Cluster>();
 	allMatchableClusters.addAll(mips);
 	allMatchableClusters.addAll(clumps);
-	//allMatchableClusters.addAll(photons);
-	//allMatchableClusters.removeAll(electronClusters); // Those are already assigned!
-	//allMatchableClusters.removeAll(photonLikePhotons); // Those shouldn't be part of a hadronic shower
 	if (m_allowPhotonSeeds) {
 	    allMatchableClusters.addAll(chargedHadronLikePhotons);
 	}
@@ -571,12 +555,10 @@
 	    }
 	}
 
-
 	// Unmatched tracks
 	List<Track> unmatchedTracks = new Vector<Track>();
 	unmatchedTracks.addAll(trackList);
 	unmatchedTracks.removeAll(tracksMatchedToClusters.keySet());
-	m_histo_unmatchedTracksMultiplicity.fill(unmatchedTracks.size());
 	List<Track> unmatchedTracksThatDontReachCalorimeter = new Vector<Track>();
 	for (Track tr : unmatchedTracks) {
 	    LocalHelixExtrapolationTrackClusterMatcher debugTrackMatch = new LocalHelixExtrapolationTrackClusterMatcher();
@@ -597,21 +579,6 @@
 	    }
 	}
 
-	// debug
-	{
-	    double momentumSum = 0.0;
-	    for (Track tr : unmatchedTracks) {
-		Hep3Vector p3 = new BasicHep3Vector(tr.getMomentum());
-		double p = p3.magnitude();
-		double pz = p3.z();
-		double cosTheta = pz/p;
-		momentumSum += p;
-		m_histo_unmatchedTracksMomentum.fill(p);
-		m_histo_unmatchedTracksCosTheta.fill(cosTheta);
-		m_histo_unmatchedTracksMomentumVsCosTheta.fill(cosTheta, p);
-	    }
-	    m_histo_unmatchedTracksMomentumSum.fill(momentumSum);		
-	}
 	if (m_debug) {
 	    System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
 	    for (Track tr : unmatchedTracks) {
@@ -709,41 +676,12 @@
 	    debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracks, seeds, tracksSortedByMomentum, tweakedTracksMatchedToClusters);
 	}
 
-	// A common way that a cluster can be identified as a photon but also
-	// have a track is for it to be an electron.
-	//
-	// This can go wrong if:
-	//   * The particle is actually a hadron (or a hadron overlapping with a photon)
-	//   * The hadron shower continues outside the "photon cluster".
-	//   * The energy of the "photon cluster" happens to correspond closely
-	//     to the momentum of the track.
-// 	for (Track tr : tracksSortedByMomentum) {
-// 	    Cluster seed = tweakedTracksMatchedToClusters.get(tr);
-// 	    if (photons.contains(seed)) {
-// 		if (!seedPhotonClusters.contains(seed)) { throw new AssertionError("Book-keeping error"); }
-// 		double energyAssumingElectron = energy(seed, m_photonCalib);
-// 		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
-// 		double residual = trackMomentum - energyAssumingElectron;
-// 		double estimatedError = 0.2 * Math.sqrt(trackMomentum);
-// 		if (trackMomentum < 1.0) { 
-// 		    // Don't shrink the error too much.
-// 		    estimatedError = 0.2; 
-// 		}
-// 		if (residual/estimatedError > -2.0 && residual/estimatedError < 2.0) {
-// 		    // Accept as electron
-// 		    electronClusters.add(seed);
-// 		    electronTracks.add(tr);
-// 		}
-// 	    }
-// 	}
-
 	// Prep for linking
 	List<Cluster> linkableClusters = new Vector<Cluster>();
 	List<Cluster> smallClustersToShare = new Vector<Cluster>();
 	List<Cluster> leftoverHitClustersToShare = new Vector<Cluster>();
 	List<Cluster> leftoverHitClustersAllowedInShowers = new Vector<Cluster>();
 	linkableClusters.addAll(mips);
-	//linkableClusters.addAll(photons);
 	linkableClusters.addAll(modifiedPhotonClusters);
 	linkableClusters.addAll(clumps);
 	linkableClusters.addAll(treesWithNoStructure);
@@ -779,35 +717,16 @@
 	    initPotentialLinks_MipMisc(mipsNew, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
 	    initPotentialLinks_MipMisc(mipsOld, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
 	    initPotentialLinks_MipMisc(mipsNew, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
-	    //initPotentialLinks_MipMisc(mipsOld, nonSeedHadronLikePhotonClusters, thresholdForProximity, "Photon");
-	    //initPotentialLinks_MipMisc(mipsNew, nonSeedHadronLikePhotonClusters, thresholdForProximity, "Photon");
 	    initPotentialLinks_MipMisc(mipsOld, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
 	    initPotentialLinks_MipMisc(mipsNew, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
 
 	    initPotentialLinks_MiscSelf(clumps, thresholdForProximityClump, "Clump", true);
 	    initPotentialLinks_MiscMisc(clumps, treesWithNoStructure, thresholdForProximity, "Clump", "LargeStructurelessTree");
-	    //initPotentialLinks_MiscMisc(clumps, seedHadronLikePhotonClusters, thresholdForProximity, "Clump", "Photon");
-	    //initPotentialLinks_MiscMisc(clumps, nonSeedHadronLikePhotonClusters, thresholdForProximity, "Clump", "Photon");
 	    initPotentialLinks_MiscMisc(clumps, seedLeftoverHitClusters, thresholdForProximity, "Clump", "SmallSeed");
 
 	    initPotentialLinks_MiscMisc(seedLeftoverHitClusters, treesWithNoStructure, thresholdForProximity, "SmallSeed", "LargeStructurelessTree");
-	    //initPotentialLinks_MiscMisc(seedLeftoverHitClusters, nonSeedHadronLikePhotonClusters, thresholdForProximity, "SmallSeed", "Photon");
 
 	    initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
-	    //initPotentialLinks_MiscMisc(treesWithNoStructure, seedHadronLikePhotonClusters,thresholdForProximity, "Tree", "Photon");
-	    //initPotentialLinks_MiscMisc(treesWithNoStructure, nonSeedHadronLikePhotonClusters,thresholdForProximity, "Tree", "Photon");
-
-	    //initPotentialLinks_MiscMisc(seedHadronLikePhotonClusters, nonSeedHadronLikePhotonClusters,thresholdForProximity, "Photon", "Photon");
-	    //initPotentialLinks_MiscSelf(nonSeedHadronLikePhotonClusters, thresholdForProximityClump, "Photon", false);
-
-	    // Look for specific case where "photon" candidate is actually part of a hadronic shower:
-	    //   * photon ID passed
-	    //   * photon is not linked directly to the track
-	    //       -> it is not at the inner surface
-	    //       -> it is several layers in
-	    //initPotentialLinks_PhotonMip(nonSeedPhotonClusters, mips, 0.8); // put in a penalty factor
-            //initPotentialLinks_PhotonMisc(nonSeedPhotonClusters, clumps);
-            //initPotentialLinks_PhotonMisc(nonSeedPhotonClusters, treesWithNoStructure);
 	}
 
 	// Done making links. Prep & build skeletons:
@@ -832,7 +751,6 @@
 	double jetTolerance = 1.5; // don't hard-code
 	Map<Track, Set<Track>> mapTrackToJet = null;
 
-
 	// Set up sharing
 	List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
 	ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
@@ -1290,9 +1208,12 @@
 	while (!noOverridesOnPreviousPass) {
 	    noOverridesOnPreviousPass = true;
 	    for (Cluster clus : linkableClusters) {
-		//if (photons.contains(clus)) {
 		if (photonLikePhotons.contains(clus)) {
 		    // Don't eat photons
+		    // FIXME: I think this may be broken twice:
+		    //  1) We don't make any photonLikePhotons, just hadron-like
+		    //  2) ... but we don't make links for non-seed photons, so
+		    //     this never comes up right now.
 		    continue;
 		}
 		if (electronClusters.contains(clus)) {
@@ -1410,7 +1331,6 @@
 	}
 
         if (m_debug) {
-	    //printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, photons, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
 	    printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
 	}
 
@@ -1471,8 +1391,6 @@
 	Set<Set<Cluster>> chargedShowersToTreatAsNeutral = new HashSet<Set<Cluster>>();
 	Set<Set<Cluster>> chargedShowersToTreatAsNeutralWithEoverPveto = new HashSet<Set<Cluster>>();
 
-	//Map<Set<Track>, Boolean> mapJetPunchThrough = new HashMap<Set<Track>, Boolean>();
-	//Map<Track, Boolean> mapPunchThrough = new HashMap<Track, Boolean>();
 	Set<Track> punchThroughTracks = new HashSet<Track>();
 	Set<Set<Track>> punchThroughJets = new HashSet<Set<Track>>();
 
@@ -1528,7 +1446,7 @@
 	    }
 	}
 
-	// Charged tracks -- see how we should handle them
+	// Charged tracks
 	for (Track tr : tracksSortedByMomentum) {
 	    if (m_clusterAsJets && mapTrackToJet.get(tr) != null) {
 		// Handle it as a jet instead
@@ -1783,7 +1701,6 @@
 		usedByCharged = usedByCharged || (jetOfCluster != null);
 	    }
 
-	    //boolean isPhoton = photons.contains(clus);
 	    boolean isPhoton = modifiedPhotonClusters.contains(clus);
 
 	    if (!usedByCharged && !isPhoton) {
@@ -1859,7 +1776,6 @@
 	}
 
 	// Photons (...)
-	//List<Cluster> photonsToUse = new Vector<Cluster>(photons);
 	List<Cluster> photonsToUse = new Vector<Cluster>(modifiedPhotonClusters);
 	photonsToUse.addAll(extraClustersToTreatAsPhotons);
 	List<Cluster> photonsUsed = new Vector<Cluster>();
@@ -1939,7 +1855,6 @@
 	outputParticleListTweakedForConfusionMatrix.addAll(outputPhotonParticleList);
 
 	if (m_debug) {
-	    //debugPrintHitStatus(allSharedClusters, tracksSortedByMomentum, newMapTrackToShowerComponents, newMapShowerComponentToTrack, photons, neutralClusterCores);
 	    debugPrintHitStatus(allSharedClusters, tracksSortedByMomentum, newMapTrackToShowerComponents, newMapShowerComponentToTrack, modifiedPhotonClusters, neutralClusterCores);
 	}
 
@@ -1978,36 +1893,6 @@
 		    allUsedCoresList.add(clus);
 		    if (allUsedCores.size() != allUsedCoresList.size()) { 
 			System.out.println("ERROR while adding cluster of "+clus.getCalorimeterHits().size()+" hits from track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
-			String printme = new String("Cluster is in these lists:");
-			if (dTreeClusters.contains(clus)) { printme += " [dTreeClusters]"; }
-			if (photons.contains(clus)) { printme += " [photons]"; }
-			if (modifiedPhotonClusters.contains(clus)) { printme += " [modifiedPhotonClusters]"; }
-			if (largeClusters.contains(clus)) { printme += " [largeClusters]"; }
-			if (mips.contains(clus)) { printme += " [mips]"; }
-			if (mipsOld.contains(clus)) { printme += " [mipsOld]"; }
-			if (mipsNew.contains(clus)) { printme += " [mipsNew]"; }
-			if (clumps.contains(clus)) { printme += " [clumps]"; }
-			if (leftoverHitClusters.contains(clus)) { printme += " [leftoverHitClusters]"; }
-			if (treesWithNoStructure.contains(clus)) { printme += " [treesWithNoStructure]"; }
-			if (electronClusters.contains(clus)) { printme += " [electronClusters]"; }
-			if (photonsMatchedToTracks.contains(clus)) { printme += " [photonsMatchedToTracks]"; }
-			if (photonLikePhotons.contains(clus)) { printme += " [photonLikePhotons]"; }
-			if (chargedHadronLikePhotons.contains(clus)) { printme += " [chargedHadronLikePhotons]"; }
-			if (allMatchableClusters.contains(clus)) { printme += " [allMatchableClusters]"; }
-			if (seedLeftoverHitClusters.contains(clus)) { printme += " [seedLeftoverHitClusters]"; }
-			if (nonSeedLeftoverHitClusters.contains(clus)) { printme += " [nonSeedLeftoverHitClusters]"; }
-			//if (seedPhotonClusters.contains(clus)) { printme += " [seedPhotonClusters]"; }
-			//if (nonSeedPhotonClusters.contains(clus)) { printme += " [nonSeedPhotonClusters]"; }
-			if (linkableClusters.contains(clus)) { printme += " [linkableClusters]"; }
-			if (smallClustersToShare.contains(clus)) { printme += " [smallClustersToShare]"; }
-			if (leftoverHitClustersToShare.contains(clus)) { printme += " [leftoverHitClustersToShare]"; }
-			if (unmatchedClusterPieces.contains(clus)) { printme += " [unmatchedClusterPieces]"; }
-			if (neutralClusterCores.contains(clus)) { printme += " [neutralClusterCores]"; }
-			if (photonsToUse.contains(clus)) { printme += " [photonsToUse]"; }
-			if (seeds.contains(clus)) { printme += " [seeds]"; }
-			if (unusedUnmatchedClusterPieces.contains(clus)) { printme += " [unusedUnmatchedClusterPieces]"; }
-			if (extraClustersToTreatAsPhotons.contains(clus)) { printme += " [extraClustersToTreatAsPhotons]"; }
-			System.out.println(printme);
 			throw new AssertionError("Mis-counting of clusters: "+allUsedCores.size()+" vs "+allUsedCoresList.size()); 
 		    }
 		}
@@ -2024,36 +1909,6 @@
 			for (Track tr : jet) {
 			    System.out.println("  track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
 			}
-			String printme = new String("Cluster is in these lists:");
-			if (dTreeClusters.contains(clus)) { printme += " [dTreeClusters]"; }
-			if (photons.contains(clus)) { printme += " [photons]"; }
-			if (modifiedPhotonClusters.contains(clus)) { printme += " [modifiedPhotonClusters]"; }
-			if (largeClusters.contains(clus)) { printme += " [largeClusters]"; }
-			if (mips.contains(clus)) { printme += " [mips]"; }
-			if (mipsOld.contains(clus)) { printme += " [mipsOld]"; }
-			if (mipsNew.contains(clus)) { printme += " [mipsNew]"; }
-			if (clumps.contains(clus)) { printme += " [clumps]"; }
-			if (leftoverHitClusters.contains(clus)) { printme += " [leftoverHitClusters]"; }
-			if (treesWithNoStructure.contains(clus)) { printme += " [treesWithNoStructure]"; }
-			if (electronClusters.contains(clus)) { printme += " [electronClusters]"; }
-			if (photonsMatchedToTracks.contains(clus)) { printme += " [photonsMatchedToTracks]"; }
-			if (photonLikePhotons.contains(clus)) { printme += " [photonLikePhotons]"; }
-			if (chargedHadronLikePhotons.contains(clus)) { printme += " [chargedHadronLikePhotons]"; }
-			if (allMatchableClusters.contains(clus)) { printme += " [allMatchableClusters]"; }
-			if (seedLeftoverHitClusters.contains(clus)) { printme += " [seedLeftoverHitClusters]"; }
-			if (nonSeedLeftoverHitClusters.contains(clus)) { printme += " [nonSeedLeftoverHitClusters]"; }
-			//if (seedPhotonClusters.contains(clus)) { printme += " [seedPhotonClusters]"; }
-			//if (nonSeedPhotonClusters.contains(clus)) { printme += " [nonSeedPhotonClusters]"; }
-			if (linkableClusters.contains(clus)) { printme += " [linkableClusters]"; }
-			if (smallClustersToShare.contains(clus)) { printme += " [smallClustersToShare]"; }
-			if (leftoverHitClustersToShare.contains(clus)) { printme += " [leftoverHitClustersToShare]"; }
-			if (unmatchedClusterPieces.contains(clus)) { printme += " [unmatchedClusterPieces]"; }
-			if (neutralClusterCores.contains(clus)) { printme += " [neutralClusterCores]"; }
-			if (photonsToUse.contains(clus)) { printme += " [photonsToUse]"; }
-			if (seeds.contains(clus)) { printme += " [seeds]"; }
-			if (unusedUnmatchedClusterPieces.contains(clus)) { printme += " [unusedUnmatchedClusterPieces]"; }
-			if (extraClustersToTreatAsPhotons.contains(clus)) { printme += " [extraClustersToTreatAsPhotons]"; }
-			System.out.println(printme);
 			throw new AssertionError("Mis-counting of clusters: "+allUsedCores.size()+" vs "+allUsedCoresList.size()); 
 		    }
 		}
@@ -2297,25 +2152,26 @@
 	m_event.put(m_outputParticleListName, outputParticleList);
 	m_event.put(m_outputParticleListName+"_withEoverPveto", outputParticleListWithEoverPveto);
 	m_event.put(m_outputParticleListName+"_forConfusionMatrix", outputParticleListTweakedForConfusionMatrix);
-	m_event.put(m_outputParticleListName+"_jetsOnly", outputParticleListForConfusionMatrix_jetTracksWithClusters);
-	m_event.put(m_outputParticleListName+"_unusedHits", particleOfUnusedHits);
-	m_event.put(m_outputParticleListName+"_chargedPassingEoverPveto", outputChargedParticleListWithEoverPveto_pass);
-	m_event.put(m_outputParticleListName+"_chargedFailingEoverPveto", outputChargedParticleListWithEoverPveto_fail);
-	m_event.put(m_outputParticleListName+"_debugEoverP", outputChargedParticleListWithClusterEnergy);
-	m_event.put(m_outputParticleListName+"_debugEoverP_punchThrough", outputChargedParticleListWithClusterEnergy_punchThrough);
-	m_event.put(m_outputParticleListName+"_debugEoverP_noPunchThrough", outputChargedParticleListWithClusterEnergy_noPunchThrough);
-	m_event.put(m_outputParticleListName+"_debugEoverP_oldCalib", outputChargedParticleListWithClusterEnergy_oldCalib);
-	m_event.put(m_outputParticleListName+"_debugEoverP_punchThrough_oldCalib", outputChargedParticleListWithClusterEnergy_punchThrough_oldCalib);
-	m_event.put(m_outputParticleListName+"_debugEoverP_noPunchThrough_oldCalib", outputChargedParticleListWithClusterEnergy_noPunchThrough_oldCalib);
-	m_event.put(m_outputParticleListName+"_dontReachCalorimeter", chargedParticlesThatDontReachCalorimeter);
-
-	m_event.put(m_outputParticleListName+"_photonLikePhotons", photonLikePhotons_particles);
-	m_event.put(m_outputParticleListName+"_chargedHadronLikePhotons", chargedHadronLikePhotons_particles);
-	m_event.put(m_outputParticleListName+"_electronClusters", electronClusters_particles);
-	m_event.put(m_outputParticleListName+"_seedHadronLikePhotonClusters", seedHadronLikePhotonClusters_particles);
-	m_event.put(m_outputParticleListName+"_nonSeedHadronLikePhotonClusters", nonSeedHadronLikePhotonClusters_particles);
-	m_event.put(m_outputParticleListName+"_nonSeedPhotonLikePhotonClusters", nonSeedPhotonLikePhotonClusters_particles);
-	
+	if (m_writeExtraEventOutput) {
+	    m_event.put(m_outputParticleListName+"_jetsOnly", outputParticleListForConfusionMatrix_jetTracksWithClusters);
+	    m_event.put(m_outputParticleListName+"_unusedHits", particleOfUnusedHits);
+	    m_event.put(m_outputParticleListName+"_chargedPassingEoverPveto", outputChargedParticleListWithEoverPveto_pass);
+	    m_event.put(m_outputParticleListName+"_chargedFailingEoverPveto", outputChargedParticleListWithEoverPveto_fail);
+	    m_event.put(m_outputParticleListName+"_debugEoverP", outputChargedParticleListWithClusterEnergy);
+	    m_event.put(m_outputParticleListName+"_debugEoverP_punchThrough", outputChargedParticleListWithClusterEnergy_punchThrough);
+	    m_event.put(m_outputParticleListName+"_debugEoverP_noPunchThrough", outputChargedParticleListWithClusterEnergy_noPunchThrough);
+	    m_event.put(m_outputParticleListName+"_debugEoverP_oldCalib", outputChargedParticleListWithClusterEnergy_oldCalib);
+	    m_event.put(m_outputParticleListName+"_debugEoverP_punchThrough_oldCalib", outputChargedParticleListWithClusterEnergy_punchThrough_oldCalib);
+	    m_event.put(m_outputParticleListName+"_debugEoverP_noPunchThrough_oldCalib", outputChargedParticleListWithClusterEnergy_noPunchThrough_oldCalib);
+	    m_event.put(m_outputParticleListName+"_dontReachCalorimeter", chargedParticlesThatDontReachCalorimeter);
+	    m_event.put(m_outputParticleListName+"_photonLikePhotons", photonLikePhotons_particles);
+	    m_event.put(m_outputParticleListName+"_chargedHadronLikePhotons", chargedHadronLikePhotons_particles);
+	    m_event.put(m_outputParticleListName+"_electronClusters", electronClusters_particles);
+	    m_event.put(m_outputParticleListName+"_seedHadronLikePhotonClusters", seedHadronLikePhotonClusters_particles);
+	    m_event.put(m_outputParticleListName+"_nonSeedHadronLikePhotonClusters", nonSeedHadronLikePhotonClusters_particles);
+	    m_event.put(m_outputParticleListName+"_nonSeedPhotonLikePhotonClusters", nonSeedPhotonLikePhotonClusters_particles);
+	}
+
 	m_event = null;
     }
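
Since m_writeExtraEventOutput is a protected field with no setter visible in this diff, the extra debug collections can presumably be re-enabled from a subclass; a hypothetical sketch:

    public class VerboseDTreeDriver extends ReclusterDTreeDriver {
        public VerboseDTreeDriver(String dTree, String tracks, String mc) {
            super(dTree, tracks, mc);
            // Restore the pre-1.14 behaviour of writing all debug lists to the event.
            m_writeExtraEventOutput = true;
        }
    }
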
 
@@ -2579,7 +2435,6 @@
 	}
 	return totalMomentum;
     }
-		    
 
     protected int countHitsInLastLayersOfHcal(ReconstructedParticle part, int nLayersToCheck) {
 	return countHitsInLastLayersOfHcal(part.getClusters(), nLayersToCheck);
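
A hypothetical sketch of how this helper might combine with the punch-through settings declared earlier in the diff (m_punchThroughLayers = 5, m_punchThroughHitMinimum = 4); the actual call site is not visible in this commit:

    // Assumption: a particle is flagged as punching through when it leaves at
    // least m_punchThroughHitMinimum hits in the last m_punchThroughLayers
    // layers of the HCAL.
    protected boolean isPunchThroughCandidate(ReconstructedParticle part) {
        int hitsAtBack = countHitsInLastLayersOfHcal(part, m_punchThroughLayers);
        return hitsAtBack >= m_punchThroughHitMinimum;
    }
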
@@ -2973,35 +2828,6 @@
 	return output;
     }
 
-    boolean passesSimplePhotonID(Cluster clus) {
-	double energy   = m_photonCalib.getEnergy(clus);
-	double originIP = impactParameterFromPhotonCoreToOrigin(clus);
-	double radius50 = radiusToCoverFractionOfPhoton(clus, 0.5);
-	double radius67 = radiusToCoverFractionOfPhoton(clus, 0.67);
-	double radius90 = radiusToCoverFractionOfPhoton(clus, 0.9);
-	if (energy < 1.0) {
-	    // Low-energy photon -- not much information here!
-	    return (radius50 < 10.0);
-	} else if (energy < 2.0) {
-	    // 1 - 2 GeV
-	    return (originIP < 750.0 && radius67 < 10.0 && radius50 < 8.0);
-	} else if (energy < 5.0) {
-	    // 2 - 5 GeV
-	    double maxImpactParameterAtFive = 350.0;
-	    double maxImpactParameterAtTwo  = 600.0;
-	    double maxImpactParameter = maxImpactParameterAtTwo + (maxImpactParameterAtFive-maxImpactParameterAtTwo)*(energy - 2.0)/3.0;
-	    return (originIP < maxImpactParameter && radius67 < 7.0 && radius50 < 4.5);
-	} else if (energy < 10.0) {
-	    // 5-10 GeV
-	    double maxImpactParameterAtTen  = 150.0;
-	    double maxImpactParameterAtFive = 350.0;
-	    double maxImpactParameter = maxImpactParameterAtFive + (maxImpactParameterAtTen-maxImpactParameterAtFive)*(energy - 5.0)/5.0;
-	    return (originIP < maxImpactParameter && radius67 < 6.0 && radius50 < 4.0);
-	} else {
-	    return (originIP < 150.0 && radius67 < 6.0 && radius50 < 4.0);
-	}
-    }
-
     void findStructureInsideCluster(Cluster largeCluster, boolean inECAL, List<Cluster> mipsOldInside, List<Cluster> mipsNewInside, List<Cluster> clumpsInside, HitMap unusedHits) {
 	// Verify
 	if (mipsOldInside.size() != 0 || mipsNewInside.size() != 0 || clumpsInside.size() != 0 || unusedHits.size() != 0) {
@@ -3030,7 +2856,7 @@
 	    } else {
 		radius = m_newMipFinderRadiusHCAL;
 	    }
-	    Clusterer newMipFinder = new TestNewMipFinder(radius);
+	    Clusterer newMipFinder = new org.lcsim.recon.cluster.mipfinder.NonProjectiveMipFinder(radius);
 	    List<Cluster> mipClustersNew = newMipFinder.createClusters(unusedHits);
 	    for (Cluster mip : mipClustersNew) {
 		mipsNewInside.add(mip);