Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDTreeDriver.java  +324 -104  1.9 -> 1.10
MJC: (contrib) Refactor, add new ways to merge overlapping showers, improve punch-through detection, find mip/clump structure inside split photons

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.9 -> 1.10
diff -u -r1.9 -r1.10
--- ReclusterDTreeDriver.java	8 Mar 2008 20:51:30 -0000	1.9
+++ ReclusterDTreeDriver.java	11 Mar 2008 17:15:50 -0000	1.10
@@ -34,6 +34,7 @@
     protected boolean m_cheatOnPhotons = false;
 
     protected boolean m_photonDebug = false;
+    protected boolean m_photonSplitDebug = false;
     protected boolean m_jetDebug = false;
 
     protected boolean m_useNewMipFinder = true;
@@ -47,11 +48,15 @@
     protected boolean m_splitPhotonSeeds = true;
     protected boolean m_allPhotonsAreValidSeeds = true;
 
-    protected boolean m_checkSharedHitsForPunchThrough = false;
+    protected boolean m_checkSharedHitsForPunchThrough = true;
     protected boolean m_allowNeutralCalibForEoverP = false;
 
-    protected int m_minHitsToBeTreatedAsClusterECAL = 30;
-    protected int m_minHitsToBeTreatedAsClusterHCAL = 20;
+    protected int m_minHitsToBeTreatedAsClusterECAL = 20;
+    protected int m_minHitsToBeTreatedAsClusterHCAL = 15;
+    protected double m_newMipFinderRadiusECAL = 20.0;
+    protected double m_newMipFinderRadiusHCAL = 50.0;
+    protected int m_punchThroughLayers = 5;
+    protected int m_punchThroughHitMinimum = 4;
 
     protected ICloud1D m_histo_unmatchedTracksMomentum;
     protected ICloud1D m_histo_unmatchedTracksCosTheta;
@@ -122,10 +127,10 @@
 	}
 
 	// Here are the clusterers:
-	Clusterer clumpfinder = new ClumpFinder();
-	Clusterer oldMipfinder = new TrackClusterDriver();
-	Clusterer newMipFinder_ECAL = new TestNewMipFinder(20.0);
-	Clusterer newMipFinder_HCAL = new TestNewMipFinder(50.0);
+	//Clusterer clumpfinder = new ClumpFinder();
+	//Clusterer oldMipfinder = new TrackClusterDriver();
+	//Clusterer newMipFinder_ECAL = new TestNewMipFinder(20.0);
+	//Clusterer newMipFinder_HCAL = new TestNewMipFinder(50.0);
 
 	// Lists of subclusters
 	List<Cluster> mips = new Vector<Cluster>();
@@ -153,61 +158,20 @@
 		minHitsToBeTreatedAsCluster = m_minHitsToBeTreatedAsClusterHCAL ; 
 	    }
 	    // Prepare hitmap, initially holding all the hits in the cluster
-	    Map<Long, CalorimeterHit> hitsInTree = new HashMap<Long, CalorimeterHit>();
-	    for (CalorimeterHit hit : dTreeCluster.getCalorimeterHits()) {
-		hitsInTree.put(hit.getCellID(), hit);
-	    }
-	    if (hitsInTree.values().contains(null)) { throw new AssertionError("null hit in hitsInTree.values()"); }
-	    // Make mips
-	    // ---------
-	    // First, take a pass with old mipfinder:
-	    List<Cluster> mipClustersOld = null;
-	    if (m_useOldMipFinder) {
-		mipClustersOld = oldMipfinder.createClusters(hitsInTree);
-		for (Cluster mip : mipClustersOld) {
-		    treeOfMip.put(mip, dTreeCluster);
-		    for (CalorimeterHit hit : mip.getCalorimeterHits()) {
-			hitsInTree.remove(hit.getCellID());
-		    }
-		}
-	    } else {
-		mipClustersOld = new Vector<Cluster>(); // empty
-	    }
-	    // Then, take a pass with new mipfinder:
-	    List<Cluster> mipClustersNew = null;
-	    if (m_useNewMipFinder) {
-		if (treeInECAL) {
-		    mipClustersNew = newMipFinder_ECAL.createClusters(hitsInTree);
-		} else {
-		    mipClustersNew = newMipFinder_HCAL.createClusters(hitsInTree);
-		}
-		for (Cluster mip : mipClustersNew) {
-		    treeOfMip.put(mip, dTreeCluster);
-		    for (CalorimeterHit hit : mip.getCalorimeterHits()) {
-			hitsInTree.remove(hit.getCellID());
-		    }
-		}
-	    } else {
-		mipClustersNew = new Vector<Cluster>(); // empty
-	    }
-	    //mipsInTrees.put(dTreeCluster, mipClusters); // BROKEN
-	    mipsOld.addAll(mipClustersOld);
-	    mipsNew.addAll(mipClustersNew);
-	    mips.addAll(mipClustersOld);
-	    mips.addAll(mipClustersNew);
-	    // Make clumps
-	    List<Cluster> clumpClusters = clumpfinder.createClusters(hitsInTree);	    
-	    //clumpsInTrees.put(dTreeCluster, clumpClusters);
-	    clumps.addAll(clumpClusters);
-	    // Remove clump hits from the hitmap so they're not used twice
-	    for (Cluster clump : clumpClusters) {
-		if (clump.getCalorimeterHits().size() == 0) { throw new AssertionError("clump has no hits"); }
-		if (clump.getCalorimeterHits().contains(null)) { throw new AssertionError("null hit in clump"); }
-		treeOfClump.put(clump, dTreeCluster);
-		for (CalorimeterHit hit : clump.getCalorimeterHits()) {
-		    hitsInTree.remove(hit.getCellID());
-		}
-	    }
+	    HitMap hitsInTree = new HitMap();
+	    List<Cluster> mipClustersOld = new Vector<Cluster>();
+	    List<Cluster> mipClustersNew = new Vector<Cluster>();
+	    List<Cluster> clumpClusters  = new Vector<Cluster>();
+
+	    findStructureInsideCluster(dTreeCluster, treeInECAL, mipClustersOld, mipClustersNew, clumpClusters, hitsInTree);
+ 	    mipsOld.addAll(mipClustersOld);
+ 	    mipsNew.addAll(mipClustersNew);
+ 	    mips.addAll(mipClustersOld);
+ 	    mips.addAll(mipClustersNew);
+ 	    clumps.addAll(clumpClusters);
+	    for (Cluster mip : mipClustersOld) { treeOfMip.put(mip, dTreeCluster); }
+	    for (Cluster mip : mipClustersNew) { treeOfMip.put(mip, dTreeCluster); }
+	    for (Cluster clump : clumpClusters) { treeOfClump.put(clump, dTreeCluster); }
 	    int numMipsInTree = mipClustersNew.size() + mipClustersOld.size();
 	    int numClumpsInTree = clumpClusters.size();
 	    if (numMipsInTree==0 && numClumpsInTree==0 && hitsInTree.size()>=minHitsToBeTreatedAsCluster) {
@@ -488,17 +452,73 @@
 		if (modifiedPhotonClusters.contains(clus) && !electronClusters.contains(clus)) {
 		    // Split
 		    List<Track> tracksOfThisPhoton = clustersMatchedToTracks.get(clus);
-		    List<Cluster> splitPhotonPieces = splitPhoton(clus, tracksOfThisPhoton);
-		    // For track match, require at least 4 hits (otherwise just too small)
-		    List<Cluster> splitPhotonPieces_bigEnoughToMatch = new Vector<Cluster>();
-		    List<Cluster> splitPhotonPieces_tooSmallToMatch = new Vector<Cluster>();
-		    for (Cluster subclus : splitPhotonPieces) {
-			if (subclus.getCalorimeterHits().size()>=4) {
-			    splitPhotonPieces_bigEnoughToMatch.add(subclus);
+		    Map<Track,Cluster> mapTrackToSplitPhotonCluster = new HashMap<Track,Cluster>();
+		    List<Cluster> splitPhotonPieces = splitPhoton(clus, tracksOfThisPhoton, mapTrackToSplitPhotonCluster);
+		    Set<Cluster> splitPhotonPieces_matched = new HashSet<Cluster>();
+		    splitPhotonPieces_matched.addAll(mapTrackToSplitPhotonCluster.values());
+		    List<Cluster> splitPhotonPieces_photonSmall = new Vector<Cluster>();
+		    List<Cluster> splitPhotonPieces_photonLarge = new Vector<Cluster>();
+		    List<Cluster> splitPhotonPieces_mip = new Vector<Cluster>();
+		    List<Cluster> splitPhotonPieces_clump = new Vector<Cluster>();
+		    for (Cluster piece : splitPhotonPieces) {
+			if (splitPhotonPieces_matched.contains(piece)) {
+			    // Split further
+			    HitMap unusedHits = new HitMap();
+			    List<Cluster> mipClustersOld = new Vector<Cluster>();
+			    List<Cluster> mipClustersNew = new Vector<Cluster>();
+			    List<Cluster> clumpClusters  = new Vector<Cluster>();
+			    findStructureInsideCluster(piece, true, mipClustersOld, mipClustersNew, clumpClusters, unusedHits);
+			    if (mipClustersOld.size()>0 || mipClustersNew.size()>0 || clumpClusters.size()>0) {
+				// Found substructure
+				mipsOld.addAll(mipClustersOld);
+				mipsNew.addAll(mipClustersNew);
+				mips.addAll(mipClustersOld);
+				mips.addAll(mipClustersNew);
+				clumps.addAll(clumpClusters);
+				for (Cluster mip : mipClustersOld) { treeOfMip.put(mip, null); } // hmm...
+				for (Cluster mip : mipClustersNew) { treeOfMip.put(mip, null); } // hmm...
+				for (Cluster clump : clumpClusters) { treeOfClump.put(clump, null); } // hmm...
+				splitPhotonPieces_mip.addAll(mipClustersNew);
+				splitPhotonPieces_mip.addAll(mipClustersOld);
+				splitPhotonPieces_clump.addAll(clumpClusters);
+				if (unusedHits.size()>0) {
+				    BasicCluster blobOfRemainingHits = new BasicCluster();
+				    for (CalorimeterHit hit : unusedHits.values()) {
+					blobOfRemainingHits.addHit(hit);
+				    }
+				    if (blobOfRemainingHits.getCalorimeterHits().size()>=4) {
+					splitPhotonPieces_photonLarge.add(blobOfRemainingHits);
+				    } else {
+					splitPhotonPieces_photonSmall.add(blobOfRemainingHits);
+				    }
+				}
+			    } else {
+				// No substructure found
+				if (piece.getCalorimeterHits().size()>=4) {
+				    splitPhotonPieces_photonLarge.add(piece);
+				} else {
+				    splitPhotonPieces_photonSmall.add(piece);
+				}
+			    }
 			} else {
-			    splitPhotonPieces_tooSmallToMatch.add(subclus);
+			    if (piece.getCalorimeterHits().size()>=4) {
+				splitPhotonPieces_photonLarge.add(piece);
+			    } else {
+				splitPhotonPieces_photonSmall.add(piece);
+			    }
 			}
 		    }
+		    List<Cluster> splitPhotonPieces_final = new Vector<Cluster>();
+		    splitPhotonPieces_final.addAll(splitPhotonPieces_photonSmall);
+		    splitPhotonPieces_final.addAll(splitPhotonPieces_photonLarge);
+		    splitPhotonPieces_final.addAll(splitPhotonPieces_mip);
+		    splitPhotonPieces_final.addAll(splitPhotonPieces_clump);
+		    // [prepare to match tracks]
+		    List<Cluster> splitPhotonPieces_bigEnoughToMatch = new Vector<Cluster>();
+		    splitPhotonPieces_bigEnoughToMatch.addAll(splitPhotonPieces_photonLarge);
+		    splitPhotonPieces_bigEnoughToMatch.addAll(splitPhotonPieces_mip);
+		    splitPhotonPieces_bigEnoughToMatch.addAll(splitPhotonPieces_clump);
+		    // [match tracks]
 		    Map<Track,Cluster> localTrackMap = new HashMap<Track,Cluster>();
 		    Map<Cluster,List<Track>> localReverseTrackMap = new HashMap<Cluster,List<Track>>();
 		    Set<Cluster> newSeeds = new HashSet<Cluster>();
@@ -513,29 +533,37 @@
 		    modifiedPhotonClusters.remove(clus);
 		    chargedHadronLikePhotons.remove(clus);
 		    photonLikePhotons.remove(clus);
-		    // [blot out existing things
+		    // [blot out existing things]
 		    clustersMatchedToTracks.remove(clus);
 		    for (Track tmpTr : tracksOfThisPhoton) {
 		    	tracksMatchedToClusters.remove(tmpTr);
 		    }
 		    // [go]
-		    for (Cluster subclus : splitPhotonPieces) {
+		    for (Cluster subclus : splitPhotonPieces_final) {
 			if (newSeeds.contains(subclus)) {
-			    treesWithNoStructure.add(subclus); // hack
+			    if (splitPhotonPieces_mip.contains(subclus) || splitPhotonPieces_clump.contains(subclus)) {
+				// [stuff] 
+			    } else {
+				treesWithNoStructure.add(subclus); // hack
+			    }
 			    List<Track> matchedTracks = localReverseTrackMap.get(subclus);
 			    clustersMatchedToTracks.put(subclus, matchedTracks);
 			    for (Track tmpTr : matchedTracks) {
 				tracksMatchedToClusters.put(tmpTr, subclus);
 			    }
-			} else if (subclus.getCalorimeterHits().size()<4) {
-			    // Too small -- share it
-			    photonFragments.add(subclus);
-			} else {
-			    // Need to ID it as photon or otherwise
-			    // Er - that's really hard. Hm.
+			} else if (splitPhotonPieces_photonLarge.contains(subclus)) {
 			    // For now lump them all as charged hadron-like photons (lazy & not really right)
 			    chargedHadronLikePhotons.add(subclus);
 			    modifiedPhotonClusters.add(subclus);
+			} else if (splitPhotonPieces_photonSmall.contains(subclus)) {
+			    // Too small -- share it
+			    photonFragments.add(subclus);
+			} else {
+			    // Already flagged as mip or clump.
+			    // Double-check...
+			    if (!mips.contains(subclus) && !clumps.contains(subclus)) {
+				throw new AssertionError("Book-keeping error");
+			    }
 			}
 		    }
 		}			
@@ -869,10 +897,50 @@
 	    // Look for tracks which
 	    //   * Have E/p significantly below 1, and/or
 	    //   * Could be linked to a cluster but were vetoed, PROVIDED THAT the cluster was not put in any other shower
+	    //
+	    // What about this case?
+	    //   * Track A has momentum less than track B.
+	    //   * Track B has a shower with an early/critical link with score 0.8
+	    //   * Track A steals the piece being linked to (score 0.9, avoiding veto)
+	    //   * Track B tries to build a shower but is blocked because it can't connect
+	    //     to the necessary piece (it's been taken by track A).
+	    // Characteristics:
+	    //   * E/p dramatically too low for a high-momentum track (that isn't punch-through)
+	    //   * Good link for the high-momentum track that's vetoed due to track seed (cluster in use).
+	    //     [Probably there will be multiple such links...]
+	    // Note that this is NOT the same as looking for a vetoed link to an UNUSED cluster.
+	    //
+	    // And what about this case?
+	    //   * Track A has momentum less than track B
+	    //   * Track A picks up cluster #1
+	    //   * Cluster #1 has a high-score link to cluster #2, but the link is
+	    //     vetoed because it would take Track A over the E/p limit
+	    //   * Track B tries to pick up cluster #2, but it is vetoed because
+	    //     doing so would require it to pick up track A's seed.
+	    // For example:
+	    //   5 GeV track A ---->---|===== [cluster 1, 3 GeV]
+	    //                         |
+	    //                         |      [cluster 2, 25 GeV]
+	    //                         |
+	    //  50 GeV track B ---->---|===== [cluster 3, 20 GeV]
+	    // Hm... I *think* this case may be handled...
+	    //
+	    // And what about this case?
+	    //   * Track A has momentum less than track B
+	    //   * Track A picks up its own shower plus a critical early cluster(s) of track B
+	    //   * Track B never gets off the ground.
+	    // Hmm... but then track A would see a vetoed-for-seed link.
+	    // Perhaps we should look at it that way around:
+	    //   * E/p dramatically too low for track B
+	    //   * E/p may be OK for track A
+	    //   * Track A was vetoed for connection to cluster due to seed (track B)
+
 	    Set<Track> tracksWithEoverPTooLow = new HashSet<Track>();
+	    Set<Track> tracksWithEoverPMuchTooLow = new HashSet<Track>();
 	    Set<Track> tracksWithVetoedLinkToUnusedCluster = new HashSet<Track>();
 	    List<Set<Track>> jetLinks = new Vector<Set<Track>>();
 
+	    // Look for tracks with E/p too low
 	    for (Track tr : tracksSortedByMomentum) {
 		Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
 		if (showerComponents == null || showerComponents.size()==0) {
@@ -891,11 +959,19 @@
 		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
 		double sigma = 0.7 * Math.sqrt(trackMomentum);
 		double normalizedResidual = (clusterEnergy-trackMomentum)/sigma;
-		if (normalizedResidual < -1.0) {
+		boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
+		if (normalizedResidual < -1.0 && !punchThrough) {
 		    // FIXME: This cut-off shouldn't be hard-coded
 		    tracksWithEoverPTooLow.add(tr);
 		}
+		if (normalizedResidual < -3.0 && !punchThrough) {
+		    // FIXME: This cut-off shouldn't be hard-coded
+		    tracksWithEoverPMuchTooLow.add(tr);
+		}
 	    }
+
+	    // Look for clusters which were not linked to a track 
+	    // AND which were vetoed for E/p with one of the tracks with E/p too low
 	    for (Cluster clus : linkableClusters) {
 		if (newMapShowerComponentToTrack.get(clus) == null) {
 		    Set<Track> tracksVetoedForThisCluster = new HashSet<Track>();
@@ -916,7 +992,7 @@
 		    if (tracksWithEoverPTooLowVetoedForThisCluster.size()>1) {
 			jetLinks.add(tracksWithEoverPTooLowVetoedForThisCluster);
 		    }
-		    if (iIter==0) {
+		    if (m_jetDebug && iIter==0) {
 			if (tracksWithEoverPTooLowVetoedForThisCluster.size()>1) {
 			    System.out.println("DEBUG: Unlinked cluster ("+clus.getCalorimeterHits().size()+" hits) was vetoed for E/p when linking to "+tracksWithEoverPTooLowVetoedForThisCluster.size()+" track showers:");
 			    for (Track vetoedTrack : tracksWithEoverPTooLowVetoedForThisCluster) {
@@ -933,6 +1009,77 @@
 		}
 	    }
 
+	    // For tracks with E/p much too low, look for links that were vetoed due
+	    // to track seed but have score > 0.7. If there are multiple, we go with
+	    // just one, maximising
+	    //   [hits in clus 1] * [hits in clus 2] * [link score]
+	    // Question: What if we need to make more than one such link?
+	    for (Track tr : tracksWithEoverPMuchTooLow) {
+		if (m_jetDebug) { System.out.println("DEBUG: Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" has E/p much too low..."); }
+		Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+		Set<Cluster> vetoedClustersDueToTrackSeedForThisTrack = mapTrackToVetoedAdditionsDueToTrackSeed.get(tr);
+		double bestFigureOfMerit = 0.0;
+		Cluster bestMatch = null;
+		for (Cluster vetoedClus : vetoedClustersDueToTrackSeedForThisTrack) {
+		    Track trackOfVetoedClus = newMapShowerComponentToTrack.get(vetoedClus);
+		    if (trackOfVetoedClus != null) {
+			List<ScoredLink> potentialLinks = m_potentialLinks.get(vetoedClus);
+			for (ScoredLink link : potentialLinks) {
+			    Cluster partner = link.counterpart(vetoedClus);
+			    if (showerComponents.contains(partner) && link.score() > 0.7) {
+				// Candidate
+				double figureOfMerit = ((double)(vetoedClus.getCalorimeterHits().size())) * ((double)(partner.getCalorimeterHits().size())) * link.score();
+				if (m_jetDebug) { System.out.println("DEBUG: Candidate link: clus["+partner.getCalorimeterHits().size()+"] in shower -> clus["+vetoedClus.getCalorimeterHits().size()+"] from other shower linked to track with p="+(new BasicHep3Vector(trackOfVetoedClus.getMomentum())).magnitude()+"... score="+link.score()+" so figure of merit is "+figureOfMerit); }
+				if (bestMatch == null || figureOfMerit > bestFigureOfMerit) {
+				    bestFigureOfMerit = figureOfMerit;
+				    bestMatch = vetoedClus;
+				}
+			    }
+			}
+		    }
+		}
+		if (bestMatch != null) {
+		    Set<Track> newJetLink = new HashSet<Track>();
+		    newJetLink.add(tr);
+		    newJetLink.add(newMapShowerComponentToTrack.get(bestMatch));
+		    jetLinks.add(newJetLink); // TEST
+		    if (m_jetDebug) {
+			String printme = new String("DEBUG: Made new jet link: { ");
+			for (Track tmptr : newJetLink) {
+			    printme += "p="+(new BasicHep3Vector(tmptr.getMomentum())).magnitude()+" ";
+			}
+			printme += "}";
+			System.out.println(printme);
+		    }
+		}
+
+		// Similar but different case:
+		Set<Track> mergeThese = new HashSet<Track>();
+		for (Track otherTrack : tracksSortedByMomentum) {
+		    if (otherTrack == tr) { continue; }
+		    Set<Cluster> otherTrackShowerClusters = newMapTrackToShowerComponents.get(otherTrack);
+		    Set<Cluster> vetoedClustersDueToTrackSeedForOtherTrack = mapTrackToVetoedAdditionsDueToTrackSeed.get(otherTrack);
+		    for (Cluster vetoedClus : vetoedClustersDueToTrackSeedForOtherTrack) {
+			if (showerComponents.contains(vetoedClus)) {
+			    // Found a possible candidate. What was the score?
+			    List<ScoredLink> potentialLinks = m_potentialLinks.get(vetoedClus);
+			    for (ScoredLink link : potentialLinks) {
+				Cluster partner = link.counterpart(vetoedClus);
+				if (otherTrackShowerClusters.contains(partner) && link.score()>0.7) {
+				    mergeThese.add(otherTrack);
+				}
+			    }
+			}
+		    }
+		}
+		if (mergeThese.size()>0) {
+		    mergeThese.add(tr);
+		    jetLinks.add(mergeThese); // TEST
+		}
+	    }
+
+	    
+
 	    ///////////////////////////////////////////////////////////////////////////////////
 
 	    // Try to cluster as jet (experimental)
@@ -970,7 +1117,7 @@
 	    }
 
 	    if (m_clusterAsJets) {
-		if (m_debug) {
+		if (m_jetDebug) {
 		    System.out.println("DEBUG: Found "+jets.size()+" jets:");
 		    for (Set<Track> jet : jets) {
 			System.out.println("DEBUG:   -> "+jet.size()+" tracks:");
@@ -1062,7 +1209,9 @@
 		}
 		if (m_clusterAsJets) {
 		    double jetShowerEnergy = energy(allShowerComponents, allSharedClusters);
-		    System.out.println("DEBUG: Reconstructed jet with "+jet.size()+" tracks (total momentum "+totalJetMomentum.magnitude()+") -> shower with E="+jetShowerEnergy);
+		    if (m_jetDebug) {
+			System.out.println("DEBUG: Reconstructed jet with "+jet.size()+" tracks (total momentum "+totalJetMomentum.magnitude()+") -> shower with E="+jetShowerEnergy);
+		    }
 		}
 	    }
 
@@ -1311,18 +1460,11 @@
 		double clusterEnergy = energy(showerComponents, allSharedClusters);
 		double clusterEnergyNeutralCalib = energy(showerComponents, allSharedClusters, m_neutralCalib);
 		double sigma = 0.7 * Math.sqrt(jetMomentumMag);
-		// Check for punch-through
-		Cluster clusterToCheckForPunchThrough = null;
-		if (m_checkSharedHitsForPunchThrough) {
-		    clusterToCheckForPunchThrough = makeClusterOfSharedHits(showerComponents, allSharedClusters);
-		} else {
-		    clusterToCheckForPunchThrough = makeCombinedCluster(showerComponents);
-		}
-		boolean fiveHitsInLastFiveLayers = (countHitsInLastLayersOfHcal(clusterToCheckForPunchThrough, 5) >= 5);
+		// Check for punch-through, E/p
 		double normalizedResidual = (clusterEnergy-jetMomentumMag)/sigma;
 		double normalizedResidualNeutralCalib = (clusterEnergyNeutralCalib-jetMomentumMag)/sigma;
-		boolean punchThrough = fiveHitsInLastFiveLayers;
-		if (fiveHitsInLastFiveLayers) { punchThroughJets.add(jet); }
+		boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
+		if (punchThrough) { punchThroughJets.add(jet); }
 		//System.out.println("DEBUG: Jet has punch-through = "+punchThrough+", #hits = "+countHitsInLastLayersOfHcal(showerComponents, 5));
 		boolean passesUpperBoundEoverP = (normalizedResidual < windowEoverPveto);
 		boolean passesLowerBoundEoverP = (normalizedResidual > -windowEoverPveto);
@@ -1381,17 +1523,10 @@
 		// Don't lowball uncertainty for low-momentum tracks
 		sigma = 0.7; 
 	    } 
-	    Cluster clusterToCheckForPunchThrough = null;
-	    if (m_checkSharedHitsForPunchThrough) {
-		clusterToCheckForPunchThrough = makeClusterOfSharedHits(showerComponents, allSharedClusters);
-	    } else {
-		clusterToCheckForPunchThrough = makeCombinedCluster(showerComponents);
-	    }
-	    boolean fiveHitsInLastFiveLayers = (countHitsInLastLayersOfHcal(clusterToCheckForPunchThrough, 5) >= 5);
 	    double normalizedResidual = (clusterEnergy-trackMomentumMag)/sigma;
 	    double normalizedResidualNeutralCalib = (clusterEnergyNeutralCalib-trackMomentumMag)/sigma;
-	    boolean punchThrough = fiveHitsInLastFiveLayers;
-	    if (fiveHitsInLastFiveLayers) { punchThroughTracks.add(tr); }
+	    boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
+	    if (punchThrough) { punchThroughTracks.add(tr); }
 	    boolean passesUpperBoundEoverP = (normalizedResidual < windowEoverPveto);
 	    boolean passesLowerBoundEoverP = (normalizedResidual > -windowEoverPveto);
 	    boolean passesLowerBoundEoverP_neutralCalib = (normalizedResidualNeutralCalib > -windowEoverPveto);
@@ -2699,10 +2834,10 @@
 	}
     }
 
-    protected List<Cluster> splitPhoton(Cluster photon, List<Track> tracksOfThisPhoton) {
+    protected List<Cluster> splitPhoton(Cluster photon, List<Track> tracksOfThisPhoton, Map<Track,Cluster> mapTrackToCluster) {
 	// Output
 	List<Cluster> output = new Vector<Cluster>();
-	Map<Track,Cluster> mapTrackToCluster = new HashMap<Track,Cluster>();
+	if (mapTrackToCluster == null || mapTrackToCluster.size() != 0) { throw new AssertionError("Must supply initialized, blank map"); }
 
 	// Organize hits by layer.
 	int minLayer = -1;
@@ -2793,6 +2928,7 @@
 
 	// Start building from track seeds:
 	for (Track tr : tracksOfThisPhoton) {
+	    if (m_photonSplitDebug) { System.out.println("DEBUG: Attempting to build cluster for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()); }
 	    CalorimeterHit seed = mapTrackToTrackSeed.get(tr);
 	    if (seed != null) {
 		if (unusedHitsByLayer.get(getLayer(seed)).contains(seed)) {
@@ -2801,11 +2937,13 @@
 		    Cluster protoCluster = buildProtoCluster(seed, unusedHitsByLayer, tangent);
 		    output.add(protoCluster);
 		    mapTrackToCluster.put(tr, protoCluster);
+		    if (m_photonSplitDebug) { debugPrintProtoCluster(protoCluster, photon); }
 		}
 	    }
 	}
 
 	// Start building from non-track seeds:
+	if (m_photonSplitDebug) { System.out.println("DEBUG: Attempting to build clusters from non-track seeds:"); }
 	for (int iLayer=minLayer; iLayer<=maxLayer; iLayer++) {
 	    Set<CalorimeterHit> unusedHitsInCurrentLayer = unusedHitsByLayer.get(iLayer);
 	    while (unusedHitsInCurrentLayer != null && unusedHitsInCurrentLayer.size()>0) {
@@ -2814,11 +2952,31 @@
 		Hep3Vector tangentAtSeed = VecOp.unit(seedPosition);
 		Cluster protoCluster = buildProtoCluster(seed, unusedHitsByLayer, tangentAtSeed);
 		output.add(protoCluster);
+		if (m_photonSplitDebug) { debugPrintProtoCluster(protoCluster, photon); }
 	    }
 	}
 	return output;
     }
 
+    void debugPrintProtoCluster(Cluster clus, Cluster parent) {
+	List<CalorimeterHit> protoCluster = clus.getCalorimeterHits();
+	Map<MCParticle, List<SimCalorimeterHit>> truthMap = truthFromHitList(parent.getCalorimeterHits());
+        int firstLayer = -1;
+	for (CalorimeterHit hit : protoCluster) {
+	    if (firstLayer == -1 || getLayer(hit) < firstLayer) {
+		firstLayer = getLayer(hit);
+	    }
+	}
+	String printme = new String("DEBUG: Built proto-cluster ");
+	printme += "with "+protoCluster.size()+" hits starting in layer "+firstLayer+":";
+	System.out.println(printme);
+	Map<MCParticle, List<SimCalorimeterHit>> truthMapForProtoCluster = truthFromHitList(protoCluster);
+	for (MCParticle part : truthMapForProtoCluster.keySet()) {
+	    System.out.println("DEBUG:   -> MC particle with ID="+part.getPDGID()+" and p="+part.getMomentum().magnitude()+" has "
+		+truthMapForProtoCluster.get(part).size()+"/"+truthMap.get(part).size()+" hits");
+	}
+    }
+
     Cluster buildProtoCluster(CalorimeterHit seed, Map<Integer, Set<CalorimeterHit>> unusedHitsByLayer, Hep3Vector tangentAtSeed) {
 	int iLayer = getLayer(seed);
 	Set<CalorimeterHit> protoCluster = new HashSet<CalorimeterHit>();
@@ -2923,4 +3081,66 @@
 	    return (originIP < 150.0 && radius67 < 6.0 && radius50 < 4.0);
 	}
     }
+
+    void findStructureInsideCluster(Cluster largeCluster, boolean inECAL, List<Cluster> mipsOldInside, List<Cluster> mipsNewInside, List<Cluster> clumpsInside, HitMap unusedHits) {
+	// Verify
+	if (mipsOldInside.size() != 0 || mipsNewInside.size() != 0 || clumpsInside.size() != 0 || unusedHits.size() != 0) {
+	    throw new AssertionError("Empty, non-null input lists required.");
+	}
+	// Book-keeping
+	for (CalorimeterHit hit : largeCluster.getCalorimeterHits()) {
+	    unusedHits.put(hit.getCellID(), hit);
+	}
+	// MIPs (old)
+	if (m_useOldMipFinder) {
+	    Clusterer oldMipfinder = new TrackClusterDriver();
+	    List<Cluster> mipClustersOld = oldMipfinder.createClusters(unusedHits);
+	    for (Cluster mip : mipClustersOld) {
+		mipsOldInside.add(mip);
+		for (CalorimeterHit hit : mip.getCalorimeterHits()) {
+		    unusedHits.remove(hit.getCellID());
+		}
+	    }
+	}
+	// MIPs (new)
+	if (m_useNewMipFinder) {
+	    double radius = 0.0;
+	    if (inECAL) {
+		radius = m_newMipFinderRadiusECAL;
+	    } else {
+		radius = m_newMipFinderRadiusHCAL;
+	    }
+	    Clusterer newMipFinder = new TestNewMipFinder(radius);
+	    List<Cluster> mipClustersNew = newMipFinder.createClusters(unusedHits);
+	    for (Cluster mip : mipClustersNew) {
+		mipsNewInside.add(mip);
+		for (CalorimeterHit hit : mip.getCalorimeterHits()) {
+		    unusedHits.remove(hit.getCellID());
+		}
+	    }
+	}
+	// Clumps
+	Clusterer clumpfinder = new ClumpFinder();
+	List<Cluster> clumpClusters = clumpfinder.createClusters(unusedHits);	    
+	clumpsInside.addAll(clumpClusters);
+	for (Cluster clump : clumpClusters) {
+	    if (clump.getCalorimeterHits().size() == 0) { throw new AssertionError("clump has no hits"); }
+	    if (clump.getCalorimeterHits().contains(null)) { throw new AssertionError("null hit in clump"); }
+	    for (CalorimeterHit hit : clump.getCalorimeterHits()) {
+		unusedHits.remove(hit.getCellID());
+	    }
+	}
+    }
+
+    boolean isPunchThrough(Set<Cluster> showerComponents, List<SharedClusterGroup> allSharedClusters) {
+	Cluster clusterToCheckForPunchThrough = null;
+	if (m_checkSharedHitsForPunchThrough) {
+	    clusterToCheckForPunchThrough = makeClusterOfSharedHits(showerComponents, allSharedClusters);
+	} else {
+	    clusterToCheckForPunchThrough = makeCombinedCluster(showerComponents);
+	}
+	int hitCount = countHitsInLastLayersOfHcal(clusterToCheckForPunchThrough, m_punchThroughLayers);
+	return (hitCount >= m_punchThroughHitMinimum);
+
+    }
 }
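
A note for readers on the E/p cuts used above: both the existing -1.0 ("too low") threshold and the new -3.0 ("much too low") threshold act on the normalized residual (clusterEnergy - trackMomentum) / sigma with sigma = 0.7 * sqrt(trackMomentum), and both are now suppressed when isPunchThrough() fires. Purely as an illustration with made-up numbers: a 25 GeV track gives sigma = 0.7 * sqrt(25) = 3.5 GeV, so its shower is flagged "too low" below 25 - 3.5 = 21.5 GeV and "much too low" below 25 - 10.5 = 14.5 GeV, at which point the new seed-veto recovery goes looking for a vetoed link with score > 0.7.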
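Also worth noting: the tunables introduced in this revision (m_punchThroughLayers, m_punchThroughHitMinimum, m_newMipFinderRadiusECAL, m_newMipFinderRadiusHCAL, m_photonSplitDebug) are protected fields rather than hard-coded constants, so they can be retuned from a subclass without touching this file. A minimal sketch under that assumption -- the subclass name and the particular values are hypothetical, not part of this commit:

    import org.lcsim.contrib.uiowa.ReclusterDTreeDriver;

    // Hypothetical steering subclass: tighten the new punch-through cut and
    // enable the new photon-split debugging. Field names are from this commit.
    public class TunedReclusterDriver extends ReclusterDTreeDriver {
        public TunedReclusterDriver() {
            m_punchThroughLayers = 6;        // scan the last 6 HCAL layers instead of 5
            m_punchThroughHitMinimum = 5;    // require >= 5 hits there to call it punch-through
            m_newMipFinderRadiusECAL = 25.0; // radius handed to TestNewMipFinder in the ECAL
            m_photonSplitDebug = true;       // verbose output from splitPhoton()
        }
    }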