Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDriver.java  +245 -58  1.9 -> 1.10
MJC: Reclusterer Mk I: Refactor initialization into helper routines; add new utilities for shared-hit cluster building, composite tracks, and track-based truth matching.
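
The most substantial new utility below is makeClusterOfSharedHits: the fractional
weights a shared hit picks up across all share groups are summed, and the hit is
promoted into the output cluster only if its total weight exceeds 0.5. Here is a
minimal, self-contained sketch of that majority-weight rule; the Hit and
WeightedHit types are illustrative stand-ins, not the lcsim
CalorimeterHit/FuzzyCalorimeterHit classes the commit actually uses.

import java.util.*;

// Stand-in types for this sketch only (assumptions, not lcsim classes).
class Hit {}

class WeightedHit {
    final Hit parent;     // the underlying calorimeter hit
    final double weight;  // fractional assignment from one share group
    WeightedHit(Hit parent, double weight) { this.parent = parent; this.weight = weight; }
}

public class SharedHitMergeSketch {
    // Sum each hit's fractional weights across all share groups, then keep
    // only the hits owned by majority (total weight > 0.5), as in
    // makeClusterOfSharedHits.
    static List<Hit> keepMajorityHits(List<WeightedHit> weightedHits) {
        Map<Hit, Double> totalWeight = new HashMap<Hit, Double>();
        for (WeightedHit wh : weightedHits) {
            Double old = totalWeight.get(wh.parent);
            totalWeight.put(wh.parent, (old == null ? 0.0 : old) + wh.weight);
        }
        List<Hit> kept = new ArrayList<Hit>();
        for (Map.Entry<Hit, Double> e : totalWeight.entrySet()) {
            if (e.getValue() > 0.5) { kept.add(e.getKey()); }
        }
        return kept;
    }

    public static void main(String[] args) {
        Hit a = new Hit();
        Hit b = new Hit();
        List<WeightedHit> in = Arrays.asList(
            new WeightedHit(a, 0.3), new WeightedHit(a, 0.4),  // a totals 0.7 -> kept
            new WeightedHit(b, 0.2));                          // b totals 0.2 -> dropped
        System.out.println(keepMajorityHits(in).size() + " hit(s) kept");  // prints: 1 hit(s) kept
    }
}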

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.9 -> 1.10
diff -u -r1.9 -r1.10
--- ReclusterDriver.java	26 Nov 2007 03:05:25 -0000	1.9
+++ ReclusterDriver.java	17 Dec 2007 22:30:44 -0000	1.10
@@ -30,7 +30,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.9 2007/11/26 03:05:25 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.10 2007/12/17 22:30:44 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -38,10 +38,17 @@
 
     List<Cluster> m_mips;
 
+    public void setDebug(boolean debug) {
+	m_debug = debug;
+	m_debugLinkScores = debug;
+	m_debugEoverP = debug;
+    }
+
     boolean m_megaDebug = false;
     boolean m_debug = false;
     boolean m_debugTiming = false;
     boolean m_debugLinkScores = false;
+    boolean m_debugEoverP = false;
     String m_outputParticleListName = "ReclusteredParticles";
     String m_mcList;
     String m_inputSmallClusters;
@@ -58,6 +65,10 @@
     protected LikelihoodEvaluator m_eval = null;
     protected boolean m_cheatOnScoring = false;
 
+    protected ReclusterDriver() {
+	// Gah, debug only! (No initialization is performed.)
+    }
+
     public ReclusterDriver(String mcList, String trackList, String muonParticles, String photonClusters, String skeletonClusters, String smallClusters, String unusedHits, String largeClusters, String mips, String clumps, String splitSkeletonClusters, LikelihoodEvaluator eval) {
 	m_eval = eval;
 	m_mcList = mcList;
@@ -72,6 +83,17 @@
 	m_inputClumps = clumps;
 	m_inputSplitSkeletonClusters = splitSkeletonClusters;
 
+	initTrackMatch();
+	initCalibration();
+	initPlots();
+
+	// Print a warning message if cheating
+	if (m_cheatOnScoring) {
+	    System.out.println("WARNING: ReclusterDriver cheating on scoring");
+	}
+    }
+
+    protected void initTrackMatch() {
 	// Track-matching is complex. Use several matchers...
 	// Try matching with local helix extrap to MIP or generic cluster:
 	LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher();
@@ -90,7 +112,9 @@
 	combinedTrackClusterMatcher.addMatcher(localHelixMatchers);
 	combinedTrackClusterMatcher.addMatcher(simpleMatchers);
 	m_trackClusterMatcher = combinedTrackClusterMatcher;
-	
+    }
+
+    protected void initCalibration() {
 	// Calibration
 	FuzzyNeutralHadronClusterEnergyCalculator neutralCalib = new FuzzyNeutralHadronClusterEnergyCalculator();
 	neutralCalib.setMinimumAllowedEnergy(0.0);
@@ -100,14 +124,10 @@
 	add(chargedCalibration);
 	FuzzyPhotonClusterEnergyCalculator photonCalib = new FuzzyPhotonClusterEnergyCalculator();
 	m_photonCalib = photonCalib;
+    }
 
-	// plots
-	initPlots();
-
-	// Print a warning message if cheating
-	if (m_cheatOnScoring) {
-	    System.out.println("WARNING: ReclusterDriver cheating on scoring");
-	}
+    protected void debugProcess(EventHeader event) {
+	super.process(event);
     }
 
     public void process(EventHeader event) {
@@ -559,7 +579,7 @@
 	double thresholdForProximityClump = 75.0; // FIXME: Don't hard-code.
 
 	if (m_cheatOnScoring) {
-	    initPotentialLinks_cheating(linkableClusters);
+	    initPotentialLinks_cheating(linkableClusters, clustersMatchedToTracks);
 	} else {
 	    // Links between main components (clumps, mips) within a large cluster
 	    // -------------------------------------------------------------------
@@ -988,18 +1008,40 @@
 	m_event = null;
     }
 
-    protected void initPotentialLinks_cheating(Collection<Cluster> clusters) {
+    protected void initPotentialLinks_cheating(Collection<Cluster> clusters, Map<Cluster,List<Track>> clustersMatchedToTracks) {
 	Cluster[] tmpArray = new Cluster[1];
 	Cluster[] clusterArray = clusters.toArray(tmpArray);
 	if (clusterArray.length != clusters.size()) { throw new AssertionError("Book-keeping error"); }
 	for (int i=0; i<clusterArray.length; i++) {
 	    Cluster clus1 = clusterArray[i];
 	    MCParticle domPart1 = quoteDominantParticle(clus1);
+	    List<Track> matchedTracks1 = clustersMatchedToTracks.get(clus1);
 	    for (int j=i+1; j<clusterArray.length; j++) {
 		Cluster clus2 = clusterArray[j];
 		if (clus1 == clus2) { throw new AssertionError("Book-keeping error"); }
 	    	MCParticle domPart2 = quoteDominantParticle(clus2);
-		if (domPart1 == domPart2) {
+		// Do the clusters match, either directly by dominant particle or via a matched track's truth particle?
+		boolean goodMatch = (domPart1 == domPart2);
+		if (matchedTracks1 != null) {
+		    for (Track tr : matchedTracks1) {
+			BaseTrackMC trackMC1 = (BaseTrackMC)(tr);
+			MCParticle domPart1_fromTrack = trackMC1.getMCParticle();
+			if (domPart2 == domPart1_fromTrack) { 
+			    goodMatch = true; 
+			}
+		    }
+		}
+		List<Track> matchedTracks2 = clustersMatchedToTracks.get(clus2);
+		if (matchedTracks2 != null) {
+		    for (Track tr : matchedTracks2) {
+			BaseTrackMC trackMC2 = (BaseTrackMC)(tr);
+			MCParticle domPart2_fromTrack = trackMC2.getMCParticle();
+			if (domPart1 == domPart2_fromTrack) { 
+			    goodMatch = true; 
+			}
+		    }
+		}
+		if (goodMatch) {
 		    addPotentialLink(clus1, clus2, 1.0);
 		}
 	    }
@@ -1193,9 +1235,8 @@
 	    double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
 	    HitMap hitsEcal = ((HitMap)(m_event.get("inputHitMapEcal")));
 	    HitMap hitsHcal = ((HitMap)(m_event.get("inputHitMapHcal")));
-	    MCParticle trackTruth = ((BaseTrackMC)(tr)).getMCParticle();
-	    List<CalorimeterHit> allTruthHitsInEcal = findHitsFromTruth_P(trackTruth, hitsEcal.values());
-	    List<CalorimeterHit> allTruthHitsInHcal = findHitsFromTruth_P(trackTruth, hitsHcal.values());
+	    List<CalorimeterHit> allTruthHitsInEcal = findHitsFromTruth_T(tr, hitsEcal.values());
+	    List<CalorimeterHit> allTruthHitsInHcal = findHitsFromTruth_T(tr, hitsHcal.values());
 	    int countCoreHitsEcal = 0;
 	    int countCoreHitsHcal = 0;
 	    int countSharedHitsEcal = 0;
@@ -1257,6 +1298,31 @@
 		}
 	    }
 	    System.out.println("  Track with p="+trackMomentum+" has "+allTruthHitsInEcal.size()+" ECAL + "+allTruthHitsInHcal.size()+" HCAL hits total. Of these, core has "+countCoreHitsEcal+" ECAL + "+countCoreHitsHcal+" HCAL hits and halo has "+countSharedHitsEcal+" ECAL + "+countSharedHitsHcal+" HCAL (weighted to "+countWeightedSharedHitsEcal+" + "+countWeightedSharedHitsHcal+"). Unmatched shared hits: "+countUnmatchedSharedHitsEcal+" ECAL + "+countUnmatchedSharedHitsHcal+" HCAL.");
+	    // MORE DEBUG
+	    System.out.println("Core clusters:");
+	    for (Cluster clus : subClusters) {
+		MCParticle domPart = quoteDominantParticle(clus);
+		System.out.println("   -> "+clus.getCalorimeterHits().size()+" hits (dominant: "+domPart.getPDGID()+" with p="+domPart.getMomentum().magnitude()+")");
+	    }
+	    // MORE DEBUG
+	    System.out.println("Core contributions:");
+	    Map<MCParticle,Set<CalorimeterHit>> mapTruthToHitSets = new HashMap<MCParticle,Set<CalorimeterHit>>();
+	    for (Cluster clus : subClusters) {
+		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		    SimCalorimeterHit simhit = (SimCalorimeterHit)(hit);
+		    int nMCParticles = simhit.getMCParticleCount();
+		    for (int iMC=0; iMC<nMCParticles; iMC++) {
+			MCParticle part = simhit.getMCParticle(iMC);
+			Set<CalorimeterHit> hitSet = mapTruthToHitSets.get(part);
+			if (hitSet == null) { hitSet = new HashSet<CalorimeterHit>(); mapTruthToHitSets.put(part,hitSet); }
+			hitSet.add(hit);
+		    }
+		}
+	    }
+	    for (MCParticle part : mapTruthToHitSets.keySet()) {
+		Set<CalorimeterHit> hitSet = mapTruthToHitSets.get(part);
+		System.out.println("   -> "+part.getPDGID()+" with p="+part.getMomentum().magnitude()+" has "+hitSet.size()+" core hits.");
+	    }
 	}
 	for (Cluster clus : photonClusters) {
 	    debugPrint("Photon", clus,tracksSortedByMomentum,newMapTrackToShowerComponents,newMapShowerComponentToTrack,allSharedClusters,newMapTrackToVetoedAdditions);
@@ -1304,6 +1370,7 @@
 	return energy(magicCombinedCluster, calib);
     }
     protected BasicCluster makeCombinedCluster(Collection<Cluster> inputClusters) {
+	if (inputClusters == null) { throw new NullPointerException("null cluster collection"); }
 	BasicCluster magicCombinedCluster = new BasicCluster();
 	for (Cluster clus : inputClusters) {
 	    magicCombinedCluster.addCluster(clus);
@@ -1319,7 +1386,39 @@
 	}
 	return output;
     }
-
+    protected BasicCluster makeClusterOfSharedHits(Collection<Cluster> mainClusterList, List<SharedClusterGroup> listOfShares) {
+	Cluster mainCluster = makeCombinedCluster(mainClusterList);
+	return makeClusterOfSharedHits(mainCluster, listOfShares);
+    }
+    protected BasicCluster makeClusterOfSharedHits(Cluster mainCluster, List<SharedClusterGroup> listOfShares) {
+	List<FuzzyCalorimeterHit> fuzzyHits = new Vector<FuzzyCalorimeterHit>();
+	for (SharedClusterGroup shares : listOfShares) {
+	    Cluster fuzzyCluster = buildFuzzyCluster(mainCluster, shares);
+	    for (CalorimeterHit hit : fuzzyCluster.getCalorimeterHits()) {
+		if (hit instanceof FuzzyCalorimeterHit) {
+		    fuzzyHits.add((FuzzyCalorimeterHit)(hit));
+		}
+	    }
+	}
+	Map<CalorimeterHit,Double> weightMap = new HashMap<CalorimeterHit,Double>();
+	for (FuzzyCalorimeterHit fuzzyHit : fuzzyHits) {
+	    double weight = fuzzyHit.getWeight();
+	    CalorimeterHit parent = fuzzyHit.getWrappedHit();
+	    if (weightMap.containsKey(parent)) {
+		Double oldWeight = weightMap.get(parent);
+		weight += oldWeight;
+	    }
+	    weightMap.put(parent, weight);
+	}
+	BasicCluster outputCluster = new BasicCluster();
+	for (CalorimeterHit hit : weightMap.keySet()) {
+	    Double weight = weightMap.get(hit);
+	    if (weight > 0.5) {
+		outputCluster.addHit(hit);
+	    }
+	}
+	return outputCluster;
+    }
     protected double proximity(Cluster clus, Hep3Vector point) {
 	if (clus.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
 	double minDist = 0;
@@ -1375,7 +1474,12 @@
     protected double quotePurity_T(List<Track> trackList, Cluster clus, List<SharedClusterGroup> allSharedClusters) {
 	Vector<MCParticle> partList = new Vector<MCParticle>();
 	for (Track tr : trackList) {
-	    partList.add(getMCParticle(tr));
+	    List<MCParticle> particlesOfThisTrack = getMCParticle(tr);
+	    for (MCParticle part : particlesOfThisTrack) {
+		if (!partList.contains(part)) {
+		    partList.add(part);
+		}
+	    }
 	}
 	return quotePurity_P(partList, clus, allSharedClusters);
     }
@@ -1428,7 +1532,12 @@
     protected double quoteEfficiency_T(List<Track> trackList, Cluster clus, List<SharedClusterGroup> allSharedClusters) {
 	Vector<MCParticle> partList = new Vector<MCParticle>();
 	for (Track tr : trackList) {
-	    partList.add(getMCParticle(tr));
+	    List<MCParticle> particlesOfThisTrack = getMCParticle(tr);
+	    for (MCParticle part : particlesOfThisTrack) {
+		if (!partList.contains(part)) {
+		    partList.add(part);
+		}
+	    }
 	}
 	return quoteEfficiency_P(partList, clus, allSharedClusters);
     }
@@ -1554,17 +1663,36 @@
 	return outputHits;
     }
 
-    MCParticle getMCParticle(Track tr) {
-	MCParticle truthPart = null;
+    List<MCParticle> getMCParticle(Track tr) {
+	List<MCParticle> output = new Vector<MCParticle>();
 	if (tr instanceof ReconTrack) {
-	    truthPart = (MCParticle)(((ReconTrack)(tr)).getMCParticle());
+	    output.add((MCParticle)(((ReconTrack)(tr)).getMCParticle()));
+	    return output;
 	} else if (tr instanceof BaseTrackMC) {
-	    truthPart = ((BaseTrackMC)(tr)).getMCParticle();
+	    output.add(((BaseTrackMC)(tr)).getMCParticle());
+	    return output;
+	} else if (tr instanceof MultipleTrackTrack) {
+	    for (Track subTrack : tr.getTracks()) {
+		List<MCParticle> subTrackParticles = getMCParticle(subTrack);
+		for (MCParticle part : subTrackParticles) {
+		    if (part != null) {
+			if (!output.contains(part)) {
+			    output.add(part);
+			}
+		    }
+		}
+	    }
 	}
-	return truthPart;
+	return output;
     }
     List<CalorimeterHit> findHitsFromTruth_T(Track tr, Collection<CalorimeterHit> hits) {
-	return findHitsFromTruth_P(getMCParticle(tr), hits);
+	List<CalorimeterHit> outputHits = new Vector<CalorimeterHit>();
+	List<MCParticle> contributingParticles = getMCParticle(tr);
+	for (MCParticle part : contributingParticles) {
+	    List<CalorimeterHit> hitsOfThisParticle = findHitsFromTruth_P(part, hits);
+	    outputHits.addAll(hitsOfThisParticle);
+	}
+	return outputHits;
     }
     List<CalorimeterHit> findHitsFromTruth_P(MCParticle part, Collection<CalorimeterHit> hitsToSearch) {
 	List<MCParticle> mcList = m_event.get(MCParticle.class, m_mcList);
@@ -1649,7 +1777,7 @@
 	protected Cluster c1 = null;
 	protected Cluster c2 = null;
     }
-    private class ScoredLink extends Link {
+    protected class ScoredLink extends Link {
 	double m_score;
 	public ScoredLink(Cluster clus1, Cluster clus2, double score) {
 	    super(clus1, clus2);
@@ -1685,7 +1813,7 @@
 	    }
 	}
     }
-    private class MomentumSort implements Comparator<Track> {
+    protected class MomentumSort implements Comparator<Track> {
 	public MomentumSort() {}
 	public int compare(Track t1, Track t2) {
 	    double[] p1 = t1.getMomentum();
@@ -1722,18 +1850,20 @@
 	while (numAddedLastPass > 0) {
 	    Set<Cluster> clustersToAddThisPass = new HashSet<Cluster>();
 	    for (Cluster clus : output) {
-		if (clus.getCalorimeterHits().size() < 6 && m_mips.contains(clus)) {
-		    // VETO
-		    if (m_debug) { System.out.println("DEBUG: Skipping daughters of cluster for implied-cluster-addition because it is a mip with only "+clus.getCalorimeterHits().size()+" hits."); }
-		    continue;
+		if (m_mips != null) {
+		    if (clus.getCalorimeterHits().size() < 6 && m_mips.contains(clus)) {
+			// VETO
+			if (m_debug) { System.out.println("DEBUG: Skipping daughters of cluster for implied-cluster-addition because it is a mip with only "+clus.getCalorimeterHits().size()+" hits."); }
+			continue;
+		    }
 		}
 
 		List<ScoredLink> potentialLinks = m_potentialLinks.get(clus);
 		for (ScoredLink testLink : potentialLinks) {
-		    if (testLink.score() >= threshold) { 
+		    if (testLink.score() > threshold) { 
-			// Link involves a cluster we've added and has score >= threshold
+			// Link involves a cluster we've added and has score > threshold
 			// so it may point to something we should add.
-			// (Test is >= rather than > because of case where both are 1.0)
+			// (Don't force the link when the score merely equals the threshold; that confuses truth-matching)
 			Cluster potentialNewCluster = testLink.counterpart(clus);
 			if (potentialNewCluster == null) { throw new AssertionError("Null counterpart!"); }
 			if (!allShowerComponents.contains(potentialNewCluster) && !output.contains(potentialNewCluster) && !clustersToAddThisPass.contains(potentialNewCluster)) {
@@ -1752,7 +1882,7 @@
 
     ///////////////////////////////////////
 
-    double deltaEnergy(Cluster mainCluster, SharedClusterGroup shares, ClusterEnergyCalculator calib) {
+    BasicCluster buildFuzzyCluster(Cluster mainCluster, SharedClusterGroup shares) {
 	Set<Cluster> subClusters = recursivelyFindSubClusters(mainCluster);
 	Map<SharedCluster,Double> cacheWeight = new HashMap<SharedCluster,Double>();
 	// Find weights
@@ -1770,7 +1900,6 @@
 		}
 	    }
 	}
-	long timeAtEndOfWeightCalc = Calendar.getInstance().getTimeInMillis(); // DEBUG
 	// Build cluster with weighted hits
 	BasicCluster fuzzyCluster = new BasicCluster();
 	fuzzyCluster.addCluster(mainCluster);
@@ -1782,11 +1911,13 @@
 		fuzzyCluster.addHit(fuzzyHit);
 	    }
 	}
+	return fuzzyCluster;
+    }
+    double deltaEnergy(Cluster mainCluster, SharedClusterGroup shares, ClusterEnergyCalculator calib) {
+	Cluster fuzzyCluster = buildFuzzyCluster(mainCluster, shares);
 	double energyWithoutSharedClusters = energy(mainCluster, calib);
 	double energyWithSharedClusters = energy(fuzzyCluster, calib);
 	double sumDeltaEnergy = energyWithSharedClusters - energyWithoutSharedClusters;
-	long timeAtEndOfEnergyCalc = Calendar.getInstance().getTimeInMillis(); // DEBUG
-	if (m_debugTiming) { System.out.println("DEBUG: Delta energy: Took "+(timeAtEndOfWeightCalc-timeAtStartOfWeightCalc)+" ms to get weights and "+(timeAtEndOfEnergyCalc-timeAtEndOfWeightCalc)+" to get energies"); }
 	return sumDeltaEnergy;
     }
     double energy(Collection<Cluster> mainClusterList, List<SharedClusterGroup> listOfShares) {
@@ -1809,10 +1940,10 @@
 
     ///////////////////////////////////////
 
-    private interface ClusterSharingAlgorithm {
+    protected interface ClusterSharingAlgorithm {
 	public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets);
     }
-    private class SharedClusterGroup {
+    protected class SharedClusterGroup {
 	List<SharedCluster> m_sharedClusters;
 	Map<Cluster, List<SharedCluster>> m_hints;
 	ClusterSharingAlgorithm m_algorithm = null;
@@ -1899,7 +2030,7 @@
 	    }
 	}
     }
-    private class ProximityClusterSharingAlgorithm implements ClusterSharingAlgorithm {
+    protected class ProximityClusterSharingAlgorithm implements ClusterSharingAlgorithm {
 	// Drops off as 1/r^3
 	double m_minimumDistance; // Below this, score is always 1.0
 	double m_maximumDistance; // Above this, score is always 0.0
@@ -1992,7 +2123,7 @@
 	return score;
     }
 
-    private Set<Cluster> iterateOnOneTrack(Track tr, Map<Track,Cluster> tracksMatchedToClusters, List<SharedClusterGroup> allSharedClusters, double threshold, double tolerance, Map<Cluster, Track> newMapShowerComponentToTrack, Set<Cluster> vetoedAdditions) 
+    protected Set<Cluster> iterateOnOneTrack(Track tr, Map<Track,Cluster> tracksMatchedToClusters, List<SharedClusterGroup> allSharedClusters, double threshold, double tolerance, Map<Cluster, Track> newMapShowerComponentToTrack, Set<Cluster> vetoedAdditions) 
     {
 	// Set seeds aside to make sure we don't add some other
 	// track's seed to our cluster.
@@ -2016,10 +2147,12 @@
 	    if (m_debug) {System.out.println("DEBUG: Shower of track with p="+trackMomentum+" has "+componentsInPreviousTier.size()+" clusters in tier "+iTier+". Expanding..."); }
 	    List<Cluster> componentsInNextTier = new Vector<Cluster>();
 	    for (Cluster miniSeed : componentsInPreviousTier) {
-		if (miniSeed != seed && miniSeed.getCalorimeterHits().size() < 6 && m_mips.contains(miniSeed)) {
-		    // VETO
-		    if (m_debug) { System.out.println("DEBUG: Skipping daughters of cluster for cluster-addition because it is a mip with only "+miniSeed.getCalorimeterHits().size()+" hits."); }
-		    continue;
+		if (m_mips != null) {
+		    if (miniSeed != seed && miniSeed.getCalorimeterHits().size() < 6 && m_mips.contains(miniSeed)) {
+			// VETO
+			if (m_debug) { System.out.println("DEBUG: Skipping daughters of cluster for cluster-addition because it is a mip with only "+miniSeed.getCalorimeterHits().size()+" hits."); }
+			continue;
+		    }
 		}
 		long timeAtStartOfThisMiniSeedLoop = Calendar.getInstance().getTimeInMillis(); // DEBUG
 		// Search for links above threshold
@@ -2028,10 +2161,10 @@
 		if (m_debugTiming) { System.out.println("DEBUG: Getting potential links took "+(timeAfterLookingUpPotentialLinks-timeAtStartOfThisMiniSeedLoop)+" ms"); }
 		if (potentialLinks != null) {
 		    for (ScoredLink link : potentialLinks) {
-		    	if (link.score() < threshold) { break ; }
-		    	long timeAtStartOfThisPotentialLink = Calendar.getInstance().getTimeInMillis(); // DEBUG
-		    	Cluster newClus = link.counterpart(miniSeed);
-		    	if (newClus == null) { throw new AssertionError("Null link!"); }
+			if (link.score() < threshold) { break ; }
+			long timeAtStartOfThisPotentialLink = Calendar.getInstance().getTimeInMillis(); // DEBUG
+			Cluster newClus = link.counterpart(miniSeed);
+			if (newClus == null) { throw new AssertionError("Null link!"); }
 		    	// Ensure that we haven't already added component
 		    	long timeBeforePreviousUseTests = Calendar.getInstance().getTimeInMillis(); // DEBUG
 		    	boolean testNotAlreadyInBaseShower = !(allShowerComponents.contains(newClus));
@@ -2083,7 +2216,7 @@
 				    }
 			    	} else {
 				    // Nope -- failed
-				    if (m_debugLinkScores) { System.out.println("DEBUG: Link with score="+link.score()+" to a subcluster with "+newClus.getCalorimeterHits().size()+" hits implied adding a total of "+impliedAdditionalClusters.size()+" clusters -- rejected because new running total would be "+energyOfExistingShowerPlusAdditionalClusters); }
+				    if (m_debugEoverP) { System.out.println("DEBUG: Link with score="+link.score()+" to a subcluster with "+newClus.getCalorimeterHits().size()+" hits implied adding a total of "+impliedAdditionalClusters.size()+" clusters -- rejected because new running total would be "+energyOfExistingShowerPlusAdditionalClusters); }
 				    vetoedAdditions.add(newClus);
 			    	}
 			    }
@@ -2115,7 +2248,7 @@
     }
 
     Map<Cluster, List<ScoredLink>> m_potentialLinks = null;
-    private void resetPotentialLinks() { m_potentialLinks = new HashMap<Cluster, List<ScoredLink>>(); }
+    protected void resetPotentialLinks() { m_potentialLinks = new HashMap<Cluster, List<ScoredLink>>(); }
     private void addPotentialLink(Cluster clus1, Cluster clus2, double score) {
 	List<ScoredLink> linksForClus1 = m_potentialLinks.get(clus1);
 	List<ScoredLink> linksForClus2 = m_potentialLinks.get(clus2);
@@ -2126,7 +2259,7 @@
 	linksForClus2.add(newLink);
     }
     // OLD //Collections.sort(potentialLinks, Collections.reverseOrder(new ScoredLinkSort()));
-    private void sortLinks() {
+    protected void sortLinks() {
 	Collection<List<ScoredLink>> linkLists = m_potentialLinks.values();
 	for (List<ScoredLink> linkList : linkLists) {
 	    Collections.sort(linkList, Collections.reverseOrder(new ScoredLinkSort()));
@@ -2271,7 +2404,7 @@
     ICloud2D m_histo_correctedEnergyEstimateResidualVsMomentum;
     ICloud1D m_histo_correctedEnergyEstimateNormalizedResidual;
     ICloud2D m_histo_correctedEnergyEstimateNormalizedResidualVsMomentum;
-    private void initPlots() {
+    protected void initPlots() {
 	IAnalysisFactory af = IAnalysisFactory.create();
 	try {
 	    m_tree = af.createTreeFactory().create("recluster.aida", "xml", false, true);
@@ -2327,13 +2460,13 @@
 	double realPurity = quotePurity_P(dominantParticle, clus, allSharedClusters);
 	printme += " Dominant particle is "+dominantParticle.getPDGID()+" with p="+dominantParticle.getMomentum().magnitude()+". Core purity="+corePurity+", realPurity="+realPurity;
 	if (tr != null) {
-	    MCParticle truthInfoOfTrack = ((BaseTrackMC)(tr)).getMCParticle();
-	    if (truthInfoOfTrack != dominantParticle) { 
+	    List<MCParticle> truthInfoOfTrack = getMCParticle(tr);
+	    if (! truthInfoOfTrack.contains(dominantParticle)) {
 		printme += " -- MISTAKE"; 
 		boolean clusterComesFromReconstructedTrack = false;
 		for (Track eachTrack : tracksSortedByMomentum) {
-		    MCParticle truthForEachTrack = ((BaseTrackMC)(eachTrack)).getMCParticle();
-		    if (dominantParticle == truthForEachTrack) {
+		    List<MCParticle> truthForEachTrack = getMCParticle(eachTrack);
+		    if (truthForEachTrack.contains(dominantParticle)) {
 			clusterComesFromReconstructedTrack = true;
 			break;
 		    }
@@ -2344,8 +2477,8 @@
 	    }
 	} else {
 	    for (Track matchedTrack : newMapTrackToShowerComponents.keySet()) {
-		MCParticle truthInfoOfTrack = ((BaseTrackMC)(matchedTrack)).getMCParticle();
-		if (truthInfoOfTrack == dominantParticle) { 
+		List<MCParticle> truthInfoOfTrack = getMCParticle(matchedTrack);
+		if (truthInfoOfTrack.contains(dominantParticle)) { 
 		    printme += " -- MISTAKE"; 
 		    break; 
 		}
@@ -2359,6 +2492,60 @@
 		System.out.println("           [vetoed for track with p="+((new BasicHep3Vector(trForVeto.getMomentum())).magnitude())+" for E/p]");
 	    }
 	}
+	
+	if (corePurity < 0.95) {
+	    Map<MCParticle,Set<CalorimeterHit>> mapTruthToHitSets = new HashMap<MCParticle,Set<CalorimeterHit>>();
+	    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		SimCalorimeterHit simhit = (SimCalorimeterHit)(hit);
+		int nMCParticles = simhit.getMCParticleCount();
+		for (int iMC=0; iMC<nMCParticles; iMC++) {
+		    MCParticle part = simhit.getMCParticle(iMC);
+		    Set<CalorimeterHit> hitSet = mapTruthToHitSets.get(part);
+		    if (hitSet == null) { hitSet = new HashSet<CalorimeterHit>(); mapTruthToHitSets.put(part,hitSet); }
+		    hitSet.add(hit);
+		}
+	    }
+	    for (MCParticle part : mapTruthToHitSets.keySet()) {
+		Set<CalorimeterHit> hitSet = mapTruthToHitSets.get(part);
+		System.out.println("           Core contribution: "+part.getPDGID()+" with p="+part.getMomentum().magnitude()+" has "+hitSet.size()+" core hits.");
+	    }
+	}
+    }
+    protected class MultipleTrackTrack extends BaseTrack {
+	protected Collection<Track> m_tracks;
+	public MultipleTrackTrack(Collection<Track> tracks) {
+	    m_tracks = tracks;
+	}
+	public List<Track> getTracks() { return new Vector<Track>(m_tracks); }
+	public int getCharge() {
+	    int chargeSum = 0;
+	    for (Track tr : m_tracks) {
+		chargeSum += tr.getCharge();
+	    }
+	    return chargeSum;
+	}
+	public double[] getMomentum() {
+	    double[] mom = new double[3];
+	    mom[0] = this.getPX();
+	    mom[1] = this.getPY();
+	    mom[2] = this.getPZ();
+	    return mom;
+	}
+	public double getPX() {
+	    double psum = 0.0;
+	    for (Track tr : m_tracks) { psum += tr.getPX(); }
+	    return psum;
+	}
+	public double getPY() {
+	    double psum = 0.0;
+	    for (Track tr : m_tracks) { psum += tr.getPY(); }
+	    return psum;
+	}
+	public double getPZ() {
+	    double psum = 0.0;
+	    for (Track tr : m_tracks) { psum += tr.getPZ(); }
+	    return psum;
+	}
     }
 }
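
The new MultipleTrackTrack above is a straightforward composite: charge and
momentum are sums over the constituent tracks, and truth lookup is the
duplicate-free union of the constituents' MC particles, which is why
getMCParticle(Track) now returns a List. A minimal sketch of that pattern
follows; SimpleTrack is a simplified stand-in assumed for this sketch, not
lcsim's real Track interface.

import java.util.*;

// Simplified stand-in for lcsim's Track (an assumption for this sketch).
interface SimpleTrack {
    int charge();
    double[] momentum();            // {px, py, pz}
    List<Object> truthParticles();  // MC particles contributing to the track
}

// Composite in the style of MultipleTrackTrack: summed quantities are
// accumulated over the constituents; truth is the duplicate-free union.
class CompositeTrackSketch implements SimpleTrack {
    private final List<SimpleTrack> parts;
    CompositeTrackSketch(List<SimpleTrack> parts) { this.parts = parts; }

    public int charge() {
        int sum = 0;
        for (SimpleTrack t : parts) { sum += t.charge(); }
        return sum;
    }

    public double[] momentum() {
        double[] p = new double[3];
        for (SimpleTrack t : parts) {
            double[] q = t.momentum();
            for (int i = 0; i < 3; i++) { p[i] += q[i]; }
        }
        return p;
    }

    // Mirrors the recursion in getMCParticle(Track): collect each
    // constituent's particles, skipping nulls and duplicates.
    public List<Object> truthParticles() {
        List<Object> out = new Vector<Object>();
        for (SimpleTrack t : parts) {
            for (Object part : t.truthParticles()) {
                if (part != null && !out.contains(part)) { out.add(part); }
            }
        }
        return out;
    }
}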
 