lcsim/src/org/lcsim/contrib/uiowa
diff -u -r1.57 -r1.58
--- ReclusterDTreeDriver.java 15 Oct 2008 22:10:25 -0000 1.57
+++ ReclusterDTreeDriver.java 16 Oct 2008 00:32:22 -0000 1.58
@@ -35,7 +35,7 @@
* in this package, which uses the implementation in
* org.lcsim.recon.cluster.directedtree developed by NIU).
*
- * @version $Id: ReclusterDTreeDriver.java,v 1.57 2008/10/15 22:10:25 mcharles Exp $
+ * @version $Id: ReclusterDTreeDriver.java,v 1.58 2008/10/16 00:32:22 mcharles Exp $
* @author Mat Charles <[log in to unmask]>
*/
@@ -306,6 +306,8 @@
}
}
+ /*
+
// Handle photons
List<Cluster> chargedHadronLikePhotons = new Vector<Cluster>();
List<Cluster> modifiedPhotonClusters = new Vector<Cluster>();
@@ -440,36 +442,28 @@
if (m_splitPhotonSeeds) {
splitPhotonSeeds(clustersMatchedToTracks, tracksMatchedToClusters, modifiedPhotonClusters, electronClusters, photonLikePhotons, chargedHadronLikePhotons, photonFragments, mipsOld, mipsNew, mips, clumps, treesWithNoStructure);
}
+
+ */
+
// Unmatched tracks
List<Track> unmatchedTracks = new Vector<Track>();
unmatchedTracks.addAll(trackList);
unmatchedTracks.removeAll(tracksMatchedToClusters.keySet());
List<Track> unmatchedTracksThatDontReachCalorimeter = new Vector<Track>();
for (Track tr : unmatchedTracks) {
- LocalHelixExtrapolationTrackClusterMatcher debugTrackMatch = new LocalHelixExtrapolationTrackClusterMatcher(m_findCluster);
- debugTrackMatch.process(m_event);
- Cluster debugMatchedCluster = debugTrackMatch.matchTrackToCluster(tr, allMatchableClusters);
- if (debugMatchedCluster != null) {
- // This can happen sometimes if it pointed to a photon that was broken up severely.
- // In any case, it clearly pointed to the calorimeter so we shouldn't
- // add on the track momentum (that would be double-counting)
+ HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
+ Hep3Vector interceptPoint = null;
+ if (result != null) {
+ interceptPoint = result.getInterceptPoint();
+ }
+ if (interceptPoint == null) {
+ // No valid extrap to calorimeter
+ unmatchedTracksThatDontReachCalorimeter.add(tr);
} else {
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Hep3Vector interceptPoint = null;
- if (result != null) {
- interceptPoint = result.getInterceptPoint();
- }
- if (interceptPoint == null) {
- // No valid extrap to calorimeter
- unmatchedTracksThatDontReachCalorimeter.add(tr);
- } else {
- // Reached calorimeter, but no match => we'll treat as NH or similar
- }
+ // Reached calorimeter, but no match => we'll treat as NH or similar
}
}
- System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:"); // FIXME
-
if (m_debug) {
System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
for (Track tr : unmatchedTracks) {
@@ -477,29 +471,7 @@
}
}
- // Now done earlier:
/*
- ShowerPointFinder showerFinder = new ShowerPointFinder(m_findCluster, allHits, tracksMatchedToClusters);
- Map<Track,BasicCluster> MapTrkToMIP = showerFinder.findMips();
- event.put("ShowerFinderMapTrackToMip", MapTrkToMIP);
- List<Cluster> preShowerMips = new Vector<Cluster>();
- preShowerMips.addAll(MapTrkToMIP.values());
- event.put("ShowerFinderMips", preShowerMips);
- event.getMetaData(preShowerMips).setTransient(true);
- */
-
- //Electron write out
- event.put("ElectronList", electronTracks);
- if(m_electronDebug) aida.cloud1D("electron size",100000).fill(electronTracks.size());
-
- // Figure out whether tracks were uniquely matched or not:
- Set<Track> uniquelyMatchedTracks = new HashSet<Track>();
- Set<Track> ambiguouslyMatchedTracks = new HashSet<Track>();
- Map<Track, Cluster> tweakedTracksMatchedToClusters = new HashMap<Track,Cluster>();
- Map<Cluster, Track> clustersMatchedToTweakedTracks = new HashMap<Cluster, Track>();
- List<Track> tweakedTracks = new Vector<Track>();
- Map<Track, Track> mapOrigTrackToTweakedTrack = new HashMap<Track,Track>();
- handleTrackMatchingAmbiguities(clustersMatchedToTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracksMatchedToClusters, clustersMatchedToTweakedTracks, tweakedTracks, mapOrigTrackToTweakedTrack);
// Check for cases where seed cluster is just plain too big
List<Track> flaggedBadTracks = new Vector<Track>();
@@ -665,6 +637,8 @@
}
}
+ */
+
// Track seeds
List<Cluster> seedLeftoverHitClusters = new Vector();
List<Cluster> nonSeedLeftoverHitClusters = new Vector();
@@ -677,34 +651,18 @@
}
if ( seedLeftoverHitClusters.size() + nonSeedLeftoverHitClusters.size() != leftoverHitClusters.size() ) { throw new AssertionError("Book-keeping error"); }
- List<Cluster> seedElectronClusters = new Vector();
- List<Cluster> seedHadronLikePhotonClusters = new Vector();
- List<Cluster> nonSeedHadronLikePhotonClusters = new Vector();
- List<Cluster> nonSeedPhotonLikePhotonClusters = new Vector();
- for (Cluster clus : electronClusters) {
- if (clustersMatchedToTracks.keySet().contains(clus)) {
- seedElectronClusters.add(clus);
- } else {
- throw new AssertionError("Electron cluster is not a seed!");
- }
- }
- for (Cluster clus : photonLikePhotons) {
- if (clustersMatchedToTracks.keySet().contains(clus)) {
- throw new AssertionError("Photon cluster is a seed!");
- } else {
- nonSeedPhotonLikePhotonClusters.add(clus);
- }
- }
- for (Cluster clus : chargedHadronLikePhotons) {
+ List<Cluster> seedPhotonClusters = new Vector();
+ List<Cluster> nonSeedPhotonClusters = new Vector();
+ for (Cluster clus : photons) {
if (clustersMatchedToTracks.keySet().contains(clus)) {
- seedHadronLikePhotonClusters.add(clus);
+ seedPhotonClusters.add(clus);
} else {
- nonSeedHadronLikePhotonClusters.add(clus);
+ nonSeedPhotonClusters.add(clus);
}
}
- if (seedElectronClusters.size() + seedHadronLikePhotonClusters.size() + nonSeedHadronLikePhotonClusters.size() + nonSeedPhotonLikePhotonClusters.size() != modifiedPhotonClusters.size() ) { throw new AssertionError("Book-keeping error"); }
- // DEBUG
+ /*
+
if (m_checkForPhotonTrackOverlap) {
System.out.print("DEBUG: BEFORE: ");
countHits(mipsOld, "mipsOld");
@@ -1306,18 +1264,18 @@
printTracks(tweakedTracksMatchedToClusters);
}
-
+ */
// Sort matched tracks by momentum
List<Track> tracksSortedByMomentum = new Vector<Track>();
- for (Track tr : tweakedTracksMatchedToClusters.keySet()) {
+ for (Track tr : tracksMatchedToClusters.keySet()) {
tracksSortedByMomentum.add(tr);
}
Collections.sort(tracksSortedByMomentum, new MomentumSort());
Set<Cluster> seeds = clustersMatchedToTracks.keySet();
if (m_debug) {
- debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracks, seeds, tracksSortedByMomentum, tweakedTracksMatchedToClusters);
+ debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, tracksSortedByMomentum);
}
// Prep for linking
@@ -1373,12 +1331,12 @@
}
}
}
- smallClustersToShare.addAll(photonFragments);
+ //smallClustersToShare.addAll(photonFragments);
List<Cluster> linkableClusters = new Vector<Cluster>();
- linkableClusters.addAll(modifiedPhotonClusters);
+ linkableClusters.addAll(photons);
linkableClusters.addAll(linkableClustersExcludingPhotons);
- // Initially, cheat
+ // Set up scoring
resetPotentialLinks();
if (m_cheatOnScoring) {
initPotentialLinks_cheating(linkableClusters, clustersMatchedToTracks);
@@ -1397,8 +1355,8 @@
initPotentialLinks_MipClump(mipsNew, clumps, scaleFactorTrackToClump*penaltyForNewMips, true);
initPotentialLinks_MipMisc(mipsOld, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
initPotentialLinks_MipMisc(mipsNew, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
- initPotentialLinks_MipMisc(mipsOld, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
- initPotentialLinks_MipMisc(mipsNew, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
+ initPotentialLinks_MipMisc(mipsOld, seedPhotonClusters, thresholdForProximity, "Photon");
+ initPotentialLinks_MipMisc(mipsNew, seedPhotonClusters, thresholdForProximity, "Photon");
initPotentialLinks_MipMisc(mipsOld, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
initPotentialLinks_MipMisc(mipsNew, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
@@ -1410,8 +1368,7 @@
initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
- // TEST
- initPotentialLinks_Cone(seeds, linkableClustersExcludingPhotons, allHits, tracksMatchedToClusters, clustersMatchedToTweakedTracks, 0.95, 0.9);
+ initPotentialLinks_Cone(seeds, linkableClustersExcludingPhotons, allHits, tracksMatchedToClusters, clustersMatchedToTracks, 0.95, 0.9);
}
// Done making links. Prep & build skeletons:
@@ -1475,66 +1432,14 @@
List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
// Small clusters
- if (proximityShareForSmallClusters) {
- ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, maxDistanceForSmallClusters);
- SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAlgForSmallClusters);
- sharedSmallDTreeClusters.createShares(linkableClusters);
- sharedSmallDTreeClusters.rebuildHints();
- allSharedClusters.add(sharedSmallDTreeClusters);
- } else if (proximityAndConeShareForSmallClusters) {
+ {
MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, maxDistanceForSmallClusters));
- if (excludePhotonsFromCone) {
- proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
- }
+ proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), photons));
SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAndConeAlg);
- if (m_debug) {
- System.out.println("DEBUG: Dumping smallClustersToShare:");
- int tmpMaxSize = 0;
- for (Cluster clus : smallClustersToShare) {
- System.out.print("DEBUG: * clus with "+clus.getCalorimeterHits().size()+" hits");
- for (CalorimeterHit hit : clus.getCalorimeterHits()) {
- System.out.print(" "+hit.getCellID());
- }
- if (linkableClusters.contains(clus)) { System.out.print(" --[linkable]"); }
- if (leftoverHitClusters.contains(clus)) { System.out.print(" --[leftoverHitClusters]"); }
- if (mips.contains(clus)) { System.out.print(" --[mips]"); }
- if (clumps.contains(clus)) { System.out.print(" --[clumps]"); }
- if (treesWithNoStructure.contains(clus)) { System.out.print(" --[treesWithNoStructure]"); }
- if (seeds.contains(clus)) { System.out.print(" --[seeds]"); }
- if (modifiedPhotonClusters.contains(clus)) { System.out.print(" --[modifiedPhotonClusters]"); }
- System.out.println();
- if (clus.getCalorimeterHits().size() > tmpMaxSize) { tmpMaxSize = clus.getCalorimeterHits().size(); }
- }
- System.out.println("DEBUG: Dumping linkableClusters:");
- for (Cluster clus : linkableClusters) {
- System.out.print("DEBUG: * clus with "+clus.getCalorimeterHits().size()+" hits");
- if (clus.getCalorimeterHits().size() <= tmpMaxSize) {
- for (CalorimeterHit hit : clus.getCalorimeterHits()) {
- System.out.print(" "+hit.getCellID());
- }
- }
- if (linkableClusters.contains(clus)) { System.out.print(" --[linkable]"); }
- if (leftoverHitClusters.contains(clus)) { System.out.print(" --[leftoverHitClusters]"); }
- if (mips.contains(clus)) { System.out.print(" --[mips]"); }
- if (mipsNew.contains(clus)) { System.out.print(" --[mipsNew]"); }
- if (mipsOld.contains(clus)) { System.out.print(" --[mipsOld]"); }
- if (clumps.contains(clus)) { System.out.print(" --[clumps]"); }
- if (treesWithNoStructure.contains(clus)) { System.out.print(" --[treesWithNoStructure]"); }
- if (seeds.contains(clus)) { System.out.print(" --[seeds]"); }
- if (modifiedPhotonClusters.contains(clus)) { System.out.print(" --[modifiedPhotonClusters]"); }
- if (event.get(Cluster.class, "OldMipsInsideTreesECAL").contains(clus)) { System.out.print(" --[OldMipsInsideTreesECAL]"); }
- if (event.get(Cluster.class, "NewMipsInsideTreesECAL").contains(clus)) { System.out.print(" --[NewMipsInsideTreesECAL]"); }
- System.out.println();
- }
- }
sharedSmallDTreeClusters.createShares(linkableClusters);
sharedSmallDTreeClusters.rebuildHints();
allSharedClusters.add(sharedSmallDTreeClusters);
- } else {
- throw new AssertionError("Unhandled case!");
}
// ECAL halo
if (dTreeShareForHaloECAL) {
@@ -1543,45 +1448,6 @@
sharedLeftoverHitClustersECAL.createShares(linkableClusters);
sharedLeftoverHitClustersECAL.rebuildHints();
allSharedClusters.add(sharedLeftoverHitClustersECAL);
- } else if (proximityAndConeShareForECAL) {
- MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
- proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(20.0, maxDistanceForDTreeClustersECAL));
- if (excludePhotonsFromCone) {
- proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
- }
- SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, proximityAndConeAlg);
- sharedLeftoverHitClustersECAL.createShares(linkableClusters);
- sharedLeftoverHitClustersECAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersECAL);
- } else if (proximityAndConeAndDTreeShareForECAL) {
- MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
- proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(20.0, maxDistanceForProximityClustersECAL));
- if (excludePhotonsFromCone) {
- proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
- }
- proximityAndConeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForProximityClustersECAL));
- SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, proximityAndConeAlg);
- sharedLeftoverHitClustersECAL.createShares(linkableClusters);
- sharedLeftoverHitClustersECAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersECAL);
- } else if (coneAndDTreeShareForECAL) {
- MultipleClusterSharingAlgorithm coneAndDTreeAlg = new MultipleClusterSharingAlgorithm();
- if (excludePhotonsFromCone) {
- coneAndDTreeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- coneAndDTreeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
- }
- coneAndDTreeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForProximityClustersECAL));
- SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, coneAndDTreeAlg);
- sharedLeftoverHitClustersECAL.createShares(linkableClusters);
- sharedLeftoverHitClustersECAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersECAL);
- } else {
- throw new AssertionError("Unhandled case!");
}
// HCAL halo
if (dTreeShareForHaloHCAL) {
@@ -1590,108 +1456,16 @@
sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
sharedLeftoverHitClustersHCAL.rebuildHints();
allSharedClusters.add(sharedLeftoverHitClustersHCAL);
- } else if (steveMipShareForHaloHCAL) {
- String mapName;
- if (m_useSteveMipsForConeScoring) {
- mapName = "TrackMipMap";
- } else {
- mapName = "ShowerFinderMapTrackToMip";
- }
- SteveMIPReassignmentAlgorithm tmpMipAlg = new SteveMIPReassignmentAlgorithm(event, 1.0, mapName);
- DownstreamTrackClusterSharingAlgorithm coneSharingAlgHCAL = new DownstreamTrackClusterSharingAlgorithm(clustersMatchedToTweakedTracks, tmpMipAlg); // TEST
- SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneSharingAlgHCAL); // TEST
- List<Cluster> tmpSeedList = new Vector<Cluster>(); // TEST
- tmpSeedList.addAll(seeds); // TEST
- sharedLeftoverHitClustersHCAL.createShares(tmpSeedList); // TEST
- sharedLeftoverHitClustersHCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersHCAL);
- } else if (taejeongMipShareForHaloHCAL) {
- Map<Track, Set<Cluster>> tmpMap = new HashMap<Track, Set<Cluster>>(); // TEST
- for (Cluster clus : clustersMatchedToTweakedTracks.keySet()) {
- Track tr = clustersMatchedToTweakedTracks.get(clus);
- Set<Cluster> tmpSet = new HashSet<Cluster>();
- tmpSet.add(clus);
- tmpMap.put(tr, tmpSet);
- System.out.println("Seed with "+clus.getCalorimeterHits().size()+" -> Track");
- }
- System.out.println("DEBUG: [seeds] contains "+seeds.size()+" clusters.");
- System.out.println("DEBUG: [clustersMatchedToTweakedTracks] contains "+clustersMatchedToTweakedTracks.keySet().size()+" clusters.");
- MIPReassignmentAlgorithm tmpMipAlg = null; // FIXME: Use actual constructor
- DownstreamTrackClusterSharingAlgorithm coneSharingAlgHCAL = new DownstreamTrackClusterSharingAlgorithm(clustersMatchedToTweakedTracks, tmpMipAlg); // TEST
- SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneSharingAlgHCAL); // TEST
- List<Cluster> tmpSeedList = new Vector<Cluster>(); // TEST
- tmpSeedList.addAll(seeds); // TEST
- sharedLeftoverHitClustersHCAL.createShares(tmpSeedList); // TEST
- sharedLeftoverHitClustersHCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersHCAL);
- } else if (proximityAndConeShareForHCAL) {
- MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
- proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(50.0, maxDistanceForProximityClustersHCAL));
- if (excludePhotonsFromCone) {
- proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
- }
- SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, proximityAndConeAlg);
- sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
- sharedLeftoverHitClustersHCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersHCAL);
- } else if (proximityAndConeAndDTreeShareForHCAL) {
- MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
- proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, maxDistanceForProximityClustersHCAL));
- if (excludePhotonsFromCone) {
- proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
- }
- proximityAndConeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 40.0, maxDistanceForProximityClustersHCAL));
- SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, proximityAndConeAlg);
- sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
- sharedLeftoverHitClustersHCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersHCAL);
- } else if (coneAndDTreeShareForHCAL) {
- MultipleClusterSharingAlgorithm coneAndDTreeAlg = new MultipleClusterSharingAlgorithm();
- if (excludePhotonsFromCone) {
- coneAndDTreeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- coneAndDTreeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
- }
- coneAndDTreeAlg.addAlgorithm(new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 40.0, maxDistanceForProximityClustersHCAL));
- SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneAndDTreeAlg);
- sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
- sharedLeftoverHitClustersHCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersHCAL);
- } else {
- throw new AssertionError("Unhandled case!");
}
// Muon hit sharing...
- if (m_useMucalBarrel || m_useMucalEndcap) {
+ {
DTreeClusterSharingAlgorithm dTreeSharingAlgMCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, maxDistanceForDTreeClustersMCAL);
SharedClusterGroup sharedLeftoverHitClustersMCAL = new SharedClusterGroup(leftoverHitClustersToShareMCAL, dTreeSharingAlgMCAL);
sharedLeftoverHitClustersMCAL.createShares(linkableClusters);
sharedLeftoverHitClustersMCAL.rebuildHints();
allSharedClusters.add(sharedLeftoverHitClustersMCAL);
}
- // Forward hit sharing...
- if (m_useFcal) {
- DTreeClusterSharingAlgorithm dTreeSharingAlgFCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, maxDistanceForDTreeClustersFCAL);
- SharedClusterGroup sharedLeftoverHitClustersFCAL = new SharedClusterGroup(leftoverHitClustersToShareFCAL, dTreeSharingAlgFCAL);
- sharedLeftoverHitClustersFCAL.createShares(linkableClusters);
- sharedLeftoverHitClustersFCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersFCAL);
- }
- if (m_debug) {
- for (Cluster clus : clustersMatchedToTweakedTracks.keySet()) {
- Track tr = clustersMatchedToTweakedTracks.get(clus);
- double rawEnergy = energy(clus);
- double totEnergy = energy(clus, allSharedClusters);
- double trackMom = new BasicHep3Vector(tr.getMomentum()).magnitude();
- System.out.println("Seed cluster ["+clus.getCalorimeterHits().size()+"] -> Track [p="+trackMom+"] -> "
- +"raw energy "+rawEnergy+" -> total energy "+totEnergy);
- }
- }
-
// Iterate to build clusters:
for (int iIter=0; iIter<10; iIter++) {
newMapShowerComponentToTrack = new HashMap<Cluster, Track>();
@@ -1699,7 +1473,7 @@
newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
Map<Track, Set<Cluster>> mapTrackToVetoedAdditionsDueToTrackSeed = new HashMap<Track, Set<Cluster>>();
- clusteringIteration(tracksSortedByMomentum, tweakedTracksMatchedToClusters, electronTracks, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, allSharedClusters, newMapTrackToThreshold, newMapTrackToTolerance);
+ clusteringIteration(tracksSortedByMomentum, tracksMatchedToClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, allSharedClusters, newMapTrackToThreshold, newMapTrackToTolerance);
// Check on E/p, update scoring, make over-rides.
@@ -1708,7 +1482,7 @@
Set<Track> tracksWithVetoedLinkToUnusedCluster = new HashSet<Track>();
List<Set<Track>> jetLinks = new Vector<Set<Track>>();
- updateScoring(uniquelyMatchedTracks, ambiguouslyMatchedTracks, tracksSortedByMomentum, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, tracksWithEoverPTooLow, tracksWithEoverPMuchTooLow, tracksWithVetoedLinkToUnusedCluster, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks, linkableClusters);
+ updateScoring(tracksSortedByMomentum, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, tracksWithEoverPTooLow, tracksWithEoverPMuchTooLow, tracksWithVetoedLinkToUnusedCluster, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks, linkableClusters);
lookForVetoedLinks(tracksSortedByMomentum, tracksWithEoverPMuchTooLow, newMapTrackToShowerComponents, newMapShowerComponentToTrack, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks);
@@ -1770,7 +1544,7 @@
}
}
- applyOverrides(linkableClusters, photonLikePhotons, electronClusters, tracksMatchedToClusters, newMapShowerComponentToTrack, newMapTrackToShowerComponents, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, newMapTrackToVetoedAdditions, newMapTrackToTolerance, allSharedClusters);
+ applyOverrides(linkableClusters, tracksMatchedToClusters, newMapShowerComponentToTrack, newMapTrackToShowerComponents, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, newMapTrackToVetoedAdditions, newMapTrackToTolerance, allSharedClusters);
// Book-keeping for cone-adding
boolean m_usePhotonsForConeTrackFix = false;
@@ -1779,7 +1553,7 @@
Track matchedTrack = newMapShowerComponentToTrack.get(clus);
Set<Track> matchedJet = newMapShowerComponentToJet.get(clus);
if (matchedTrack == null && matchedJet == null) {
- if (modifiedPhotonClusters.contains(clus) && !m_usePhotonsForConeTrackFix) {
+ if (photons.contains(clus) && !m_usePhotonsForConeTrackFix) {
// This is a photon and we aren't using them for the cone -- ignore it
} else {
unassignedClusters.add(clus);
@@ -1819,23 +1593,27 @@
}
}
- printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
+ printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, photons, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
makePlotsOfEoverP(newMapTrackToShowerComponents, newMapJetToShowerComponents, mapTrackToJet, allSharedClusters);
- // Outputs
- makeParticlesAndWriteOut(trackList, tracksSortedByMomentum, unmatchedTracksThatDontReachCalorimeter, mapOrigTrackToTweakedTrack,
- tracksMatchedToClusters, clustersMatchedToTracks,
- electronTracks, modifiedPhotonClusters,electronClusters, photonLikePhotons, chargedHadronLikePhotons, vetoedPhotons,
- seedHadronLikePhotonClusters, nonSeedHadronLikePhotonClusters, nonSeedPhotonLikePhotonClusters,
- newMapTrackToShowerComponents, newMapShowerComponentToTrack,
- linkableClusters,
- mips, clumps, treesWithNoStructure, seedLeftoverHitClusters,
- jets, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet,
- allSharedClusters,
- smallClustersToShare, leftoverHitClustersToShare,
- allHits, allHitsEcalBarrel, allHitsEcalEndcap, allHitsHcalBarrel, allHitsHcalEndcap
- );
+ // Outputs -- FIXME
+ List<ReconstructedParticle> muonParticles = makeMuons();
+ List<ReconstructedParticle> electronParticles = makeElectrons();
+ List<ReconstructedParticle> jetParticles = makeJetParticles(tracksSortedByMomentum, newMapJetToShowerComponents, allSharedClusters);
+ List<ReconstructedParticle> nonJetChargedParticles = makeNonJetChargedParticles(tracksSortedByMomentum, newMapTrackToShowerComponents, mapTrackToJet, allSharedClusters);
+ List<ReconstructedParticle> neutralHadronParticles = makeNeutralHadrons(linkableClusters, newMapShowerComponentToTrack, newMapShowerComponentToJet, photons, allSharedClusters, vetoedPhotons);
+ List<ReconstructedParticle> photonParticles = makePhotons(photons, newMapShowerComponentToTrack, newMapShowerComponentToJet);
+ List<ReconstructedParticle> missedChargedParticles = makeChargedParticlesThatDontReachCalorimeter(unmatchedTracksThatDontReachCalorimeter);
+ List<ReconstructedParticle> allParticles = new Vector<ReconstructedParticle>();
+ allParticles.addAll(muonParticles);
+ allParticles.addAll(electronParticles);
+ allParticles.addAll(jetParticles);
+ allParticles.addAll(nonJetChargedParticles);
+ allParticles.addAll(neutralHadronParticles);
+ allParticles.addAll(photonParticles);
+ allParticles.addAll(missedChargedParticles);
+ m_event.put(m_outputParticleListName, allParticles);
m_event = null;
}
@@ -2035,8 +1813,11 @@
return output;
}
- protected void debugPrintTrackInfo(List<Track> trackList, List<Track> unmatchedTracks, Map<Track,Cluster> tracksMatchedToClusters, Set<Track> uniquelyMatchedTracks, Set<Track> ambiguouslyMatchedTracks, List<Track> tweakedTracks, Set<Cluster> seeds, List<Track> tracksSortedByMomentum, Map<Track, Cluster> tweakedTracksMatchedToClusters) {
- System.out.println("There were "+trackList.size()+" tracks in the event. Of these, "+unmatchedTracks.size()+" were unmatched and "+tracksMatchedToClusters.size()+" were matched. Of the track matches, "+uniquelyMatchedTracks.size()+" were unique and "+ambiguouslyMatchedTracks.size()+" were ambiguous. After tweaking, there were "+tweakedTracks.size()+" tracks. The event contains "+seeds.size()+" seeds.");
+ protected void debugPrintTrackInfo(List<Track> trackList,
+ List<Track> unmatchedTracks,
+ Map<Track,Cluster> tracksMatchedToClusters,
+ List<Track> tracksSortedByMomentum) {
+ System.out.println("There were "+trackList.size()+" tracks in the event. Of these, "+unmatchedTracks.size()+" were unmatched and "+tracksMatchedToClusters.size()+" were matched.");
System.out.println("Here are the "+unmatchedTracks.size()+" unmatched tracks:");
for (Track tr : unmatchedTracks) {
System.out.println(" * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
@@ -2044,23 +1825,6 @@
System.out.println("Here are the "+tracksMatchedToClusters.size()+" matched tracks:");
for (Track tr : tracksMatchedToClusters.keySet()) {
Cluster seed = tracksMatchedToClusters.get(tr);
- String printme = new String(" * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" matched to a seed with "+(seed.getCalorimeterHits().size())+" hits");
- if (uniquelyMatchedTracks.contains(tr)) {
- printme += " (unique)";
- }
- if (ambiguouslyMatchedTracks.contains(tr)) {
- printme += " (ambiguous)";
- }
- if (uniquelyMatchedTracks.contains(tr) && ambiguouslyMatchedTracks.contains(tr)) {
- throw new AssertionError("Book-keeping error");
- } else if (!uniquelyMatchedTracks.contains(tr) && !ambiguouslyMatchedTracks.contains(tr)) {
- throw new AssertionError("Book-keeping error");
- }
- System.out.println(printme);
- }
- System.out.println("Here are the "+tracksSortedByMomentum.size()+" tweaked tracks, sorted by momentum:");
- for (Track tr : tracksSortedByMomentum) {
- Cluster seed = tweakedTracksMatchedToClusters.get(tr);
System.out.println(" * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" matched to a seed with "+(seed.getCalorimeterHits().size())+" hits");
}
}
@@ -2227,10 +1991,14 @@
BaseReconstructedParticle part = new BaseReconstructedParticle();
// Set up cluster
part.addCluster(clus);
- Cluster sharedHitClus = makeClusterOfSharedHits(clus, allSharedClusters);
- if (sharedHitClus.getCalorimeterHits().size()>0) { part.addCluster(sharedHitClus); }
+ double clusterEnergy = energy(clus, m_photonCalib);
+ if (allSharedClusters != null) {
+ Cluster sharedHitClus = makeClusterOfSharedHits(clus, allSharedClusters);
+ if (sharedHitClus.getCalorimeterHits().size()>0) { part.addCluster(sharedHitClus); }
+ clusterEnergy = energy(clus, allSharedClusters, m_photonCalib);
+ }
+
// Calculate kinematic properties
- double clusterEnergy = energy(clus, allSharedClusters, m_photonCalib);
Hep3Vector pos = new BasicHep3Vector(clus.getPosition());
Hep3Vector threeMomentum = VecOp.mult(clusterEnergy, VecOp.unit(pos)); // assume it comes from the IP
HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
@@ -3362,6 +3130,320 @@
}
}
+ /**
+ * Convenience overload: wraps the single cluster in a one-element list and
+ * delegates to makeSingleChargedParticle(Track, Collection, int).
+ */
+ protected ReconstructedParticle makeSingleChargedParticle(Track tr, Cluster clus, int pdg) {
+ List<Cluster> clusList = new Vector<Cluster>();
+ clusList.add(clus);
+ return makeSingleChargedParticle(tr, clusList, pdg);
+ }
+
+ /**
+ * Build a charged ReconstructedParticle from a track, a set of associated
+ * clusters, and a PDG hypothesis. Kinematics come from the TRACK only:
+ * E = sqrt(|p_track|^2 + m^2) with m taken from the PDG particle type; the
+ * clusters are attached for book-keeping but do not enter the four-vector.
+ */
+ protected ReconstructedParticle makeSingleChargedParticle(Track tr, Collection<Cluster> clusters, int pdg) {
+ // Look up mass and particle-ID metadata for the requested PDG code.
+ ParticleType type = ParticlePropertyManager.getParticlePropertyProvider().get(pdg);
+ BaseParticleID pid = new BaseParticleID(type);
+ double currentParticleMass = type.getMass();
+ BaseReconstructedParticle part = new BaseReconstructedParticle();
+ part.addTrack(tr);
+ for (Cluster clus : clusters) {
+ part.addCluster(clus);
+ }
+ // Charge is taken from the track fit, not from the sign of the PDG code.
+ part.setCharge(tr.getCharge());
+ Hep3Vector trackMomentum = momentum(tr);
+ double trackMomentumMag = trackMomentum.magnitude();
+ // E^2 = p^2 + m^2 under the chosen mass hypothesis.
+ double trackEnergySq = trackMomentumMag*trackMomentumMag + currentParticleMass*currentParticleMass;
+ HepLorentzVector fourMomentum = new BasicHepLorentzVector(Math.sqrt(trackEnergySq), trackMomentum);
+ part.set4Vector(fourMomentum);
+ part.setMass(currentParticleMass);
+ part.setReferencePoint(new BasicHep3Vector(tr.getReferencePoint()));
+ part.addParticleID(pid);
+ part.setParticleIdUsed(pid);
+ return part;
+ }
+
+ /**
+ * Build muon ReconstructedParticles from the event's track-to-cluster map.
+ * Note the PDG sign convention: a positively charged track gets PDG -13
+ * (mu+), a negative one gets +13 (mu-). Tracks with charge 0 fall into the
+ * mu- branch -- presumably such tracks never appear in this map; verify.
+ */
+ protected List<ReconstructedParticle> makeMuons() {
+ // Constants:
+ int pdg_muminus = 13;
+ int pdg_muplus = -13;
+ // Read in:
+ // NOTE(review): unchecked cast from the event store; a wrong collection
+ // type would only fail at runtime. Collection name is hard-coded (see FIXME).
+ Map<Track,Cluster> trackClusterMap = (Map<Track,Cluster>)(m_event.get("MuonTrackClusterMap")); // FIXME: Don't hard-code
+ // Make particles:
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ for (Track tr : trackClusterMap.keySet()) {
+ Cluster clus = trackClusterMap.get(tr);
+ ReconstructedParticle part = null;
+ if (tr.getCharge() > 0) {
+ part = makeSingleChargedParticle(tr, clus, pdg_muplus);
+ } else {
+ part = makeSingleChargedParticle(tr, clus, pdg_muminus);
+ }
+ outputParticleList.add(part);
+ }
+ return outputParticleList;
+ }
+
+ /**
+ * Build electron ReconstructedParticles from the event's electron
+ * track-to-cluster map. Mirrors makeMuons: charge > 0 maps to PDG -11
+ * (positron), otherwise PDG +11 (electron).
+ */
+ protected List<ReconstructedParticle> makeElectrons() {
+ // Constants
+ int pdg_electron = 11;
+ int pdg_positron = -11;
+ // Read in
+ // NOTE(review): unchecked cast from the event store, same caveat as makeMuons.
+ Map<Track,Cluster> trackClusterMap = (Map<Track,Cluster>)(m_event.get("MapElectronTracksToClusters")); // FIXME: Don't hard-code
+ // Make particles
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ for (Track tr : trackClusterMap.keySet()) {
+ Cluster clus = trackClusterMap.get(tr);
+ ReconstructedParticle part = null;
+ if (tr.getCharge() > 0) {
+ part = makeSingleChargedParticle(tr, clus, pdg_positron);
+ } else {
+ part = makeSingleChargedParticle(tr, clus, pdg_electron);
+ }
+ outputParticleList.add(part);
+ }
+ return outputParticleList;
+ }
+
+ /**
+ * Build charged-pion ReconstructedParticles for tracks whose helix
+ * extrapolation never reached the calorimeter, so there is no cluster to
+ * attach. Kinematics therefore come purely from the track (see
+ * makeSingleChargedParticle).
+ */
+ protected List<ReconstructedParticle> makeChargedParticlesThatDontReachCalorimeter(List<Track> unmatchedTracksThatDontReachCalorimeter) {
+ int pdg_piplus = 211;
+ int pdg_piminus = -211;
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ // One shared empty cluster list is passed for every particle; safe as long
+ // as makeSingleChargedParticle only iterates it and never stores/mutates it.
+ List<Cluster> emptyList = new Vector<Cluster>();
+ for (Track tr : unmatchedTracksThatDontReachCalorimeter) {
+ ReconstructedParticle part = null;
+ if (tr.getCharge() > 0) {
+ part = makeSingleChargedParticle(tr, emptyList, pdg_piplus);
+ } else {
+ part = makeSingleChargedParticle(tr, emptyList, pdg_piminus);
+ }
+ outputParticleList.add(part);
+ }
+ return outputParticleList;
+ }
+
+
+ /**
+ * Build charged-pion ReconstructedParticles for tracks that are NOT part of
+ * a jet (tracks present in mapTrackToJet are skipped here and handled by
+ * makeJetParticles). Each track gets its shower components plus, if
+ * non-empty, a synthetic cluster of shared hits.
+ */
+ protected List<ReconstructedParticle> makeNonJetChargedParticles(Collection<Track> tracksSortedByMomentum, Map<Track,Set<Cluster>> newMapTrackToShowerComponents, Map<Track, Set<Track>> mapTrackToJet, List<SharedClusterGroup> allSharedClusters) {
+ int pdg_piplus = 211;
+ int pdg_piminus = -211;
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ for (Track tr : tracksSortedByMomentum) {
+ if (mapTrackToJet.get(tr) != null) {
+ // Handle it as a jet instead
+ continue;
+ }
+ Set<Cluster> clusters = newMapTrackToShowerComponents.get(tr);
+ Cluster sharedHitClus = makeClusterOfSharedHits(clusters, allSharedClusters);
+ // NOTE(review): adding the shared-hit cluster mutates the Set that lives
+ // inside newMapTrackToShowerComponents -- the caller's map is changed as a
+ // side effect. Confirm no later code re-reads that map expecting only the
+ // original shower components.
+ if (sharedHitClus.getCalorimeterHits().size()>0) { clusters.add(sharedHitClus); }
+ ReconstructedParticle part = null;
+ if (tr.getCharge() > 0) {
+ part = makeSingleChargedParticle(tr, clusters, pdg_piplus);
+ } else {
+ part = makeSingleChargedParticle(tr, clusters, pdg_piminus);
+ }
+ outputParticleList.add(part);
+ }
+ return outputParticleList;
+ }
+
+ /**
+ * Build charged-pion ReconstructedParticles for tracks grouped into jets.
+ * Convention: ALL of the jet's shower components (plus any shared-hit
+ * cluster) are attached to whichever track the jet's Set iterates first;
+ * every other track in the jet gets an empty cluster set. This avoids
+ * double-counting calorimeter hits across the jet's tracks.
+ * NOTE(review): "first" depends on the Set's iteration order and is thus
+ * arbitrary for a HashSet -- presumably only the total is used downstream.
+ */
+ protected List<ReconstructedParticle> makeJetParticles(Collection<Track> tracksSortedByMomentum, Map<Set<Track>, Set<Cluster>> newMapJetToShowerComponents, List<SharedClusterGroup> allSharedClusters) {
+ int pdg_piplus = 211;
+ int pdg_piminus = -211;
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ for (Set<Track> jet : newMapJetToShowerComponents.keySet()) {
+ boolean firstTrackInJet = true;
+ for (Track tr : jet) {
+ Set<Cluster> clusters = null;
+ if (firstTrackInJet) {
+ // Assign all clusters to first track in jet
+ clusters = newMapJetToShowerComponents.get(jet);
+ Cluster sharedHitClus = makeClusterOfSharedHits(clusters, allSharedClusters);
+ // NOTE(review): this mutates the Set stored inside
+ // newMapJetToShowerComponents (caller-visible side effect).
+ if (sharedHitClus.getCalorimeterHits().size()>0) { clusters.add(sharedHitClus); }
+ firstTrackInJet = false;
+ } else {
+ // Other tracks get none
+ clusters = new HashSet<Cluster>();
+ }
+ ReconstructedParticle part = null;
+ if (tr.getCharge() > 0) {
+ part = makeSingleChargedParticle(tr, clusters, pdg_piplus);
+ } else {
+ part = makeSingleChargedParticle(tr, clusters, pdg_piminus);
+ }
+ outputParticleList.add(part);
+ }
+ }
+ return outputParticleList;
+ }
+
+ /**
+ * Build photon ReconstructedParticles from the given photon-like clusters,
+ * skipping any cluster already claimed by a charged shower (i.e. mapped to a
+ * track or to a jet). Shared hits are deliberately excluded (null passed to
+ * makePhoton).
+ */
+ protected List<ReconstructedParticle> makePhotons(Collection<Cluster> photons,
+ Map<Cluster, Track> newMapShowerComponentToTrack,
+ Map<Cluster, Set<Track>> newMapShowerComponentToJet)
+ {
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ for (Cluster clus : photons) {
+ // Verify not used by any charged shower
+ Track tr = newMapShowerComponentToTrack.get(clus);
+ Set<Track> jet = newMapShowerComponentToJet.get(clus);
+ boolean usedByCharged = (tr != null || jet!=null);
+ if (!usedByCharged) {
+ BaseReconstructedParticle part = makePhoton(clus, null); // don't use shared hits
+ outputParticleList.add(part);
+ }
+ }
+ return outputParticleList;
+ }
+
+ /**
+ * Build neutral-hadron (and rescued-photon) ReconstructedParticles from the
+ * cluster pieces left over after charged-shower and photon assignment.
+ * Steps: (1) collect unclaimed, non-photon pieces; (2) rescue vetoed photon
+ * candidates that went unused; (3) greedily agglomerate the remaining pieces
+ * into "cores" via the m_potentialLinks score graph; (4) give cores with
+ * E <= m(K0) special treatment (ECAL-only -> photon, otherwise merge into the
+ * nearest large core); (5) emit one neutral hadron per surviving core plus
+ * photons for the rescued clusters.
+ */
+ protected List<ReconstructedParticle> makeNeutralHadrons(List<Cluster> linkableClusters,
+ Map<Cluster, Track> newMapShowerComponentToTrack,
+ Map<Cluster, Set<Track>> newMapShowerComponentToJet,
+ List<Cluster> photons,
+ List<SharedClusterGroup> allSharedClusters,
+ List<Cluster> vetoedPhotons
+ )
+ {
+ // Prep
+ // Mass hypothesis for neutral hadrons: K0 (PDG 130).
+ // NOTE(review): pid_K0 is constructed but never used in this method.
+ int pdg_K0 = 130;
+ ParticleType type_K0 = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_K0);
+ BaseParticleID pid_K0 = new BaseParticleID(type_K0);
+ double mass_K0 = type_K0.getMass();
+
+ // Find neutral hadron pieces
+ // A piece is "unmatched" if no track, no jet, and no photon flag claims it.
+ List<Cluster> unmatchedClusterPieces = new Vector<Cluster>();
+ for (Cluster clus : linkableClusters) {
+ Track tr = newMapShowerComponentToTrack.get(clus);
+ Set<Track> jetOfCluster = newMapShowerComponentToJet.get(clus);
+ boolean usedByCharged = (tr != null || jetOfCluster != null);
+ boolean isPhoton = photons.contains(clus);
+ if (!usedByCharged && !isPhoton) {
+ unmatchedClusterPieces.add(clus);
+ }
+ }
+
+ // Infrastructure for cores of neutral hadron showers:
+ // clustersOfNeutralClusterCore / neutralClusterCoreOfCluster are forward and
+ // reverse maps between a combined core and its constituent pieces.
+ List<Cluster> neutralClusterCores = new Vector<Cluster>();
+ Map<Cluster, Set<Cluster>> clustersOfNeutralClusterCore = new HashMap<Cluster, Set<Cluster>>();
+ Map<Cluster, Cluster> neutralClusterCoreOfCluster = new HashMap<Cluster, Cluster>();
+ Set<Cluster> unusedUnmatchedClusterPieces = new HashSet<Cluster>();
+ unusedUnmatchedClusterPieces.addAll(unmatchedClusterPieces);
+
+ // If a photon-like cluster was vetoed because of a nearby MIP but was
+ // never used in a charged shower, re-flag it as a photon.
+ Set<Cluster> extraClustersToTreatAsPhotons = new HashSet<Cluster>();
+ for (Cluster clus : vetoedPhotons) {
+ if (unusedUnmatchedClusterPieces.contains(clus)) {
+ // Treat this one as a photon instead
+ unusedUnmatchedClusterPieces.remove(clus);
+ extraClustersToTreatAsPhotons.add(clus);
+ }
+ }
+
+ // Clustering pass
+ // Breadth-first agglomeration: starting from each unused piece, repeatedly
+ // absorb any still-unused piece linked with score >= threshold.
+ // NOTE(review): 0.7 is a magic number; consider promoting to a named field.
+ // NOTE(review): crosscheckCount is computed but never checked in this method.
+ double threshold = 0.7;
+ int crosscheckCount = unusedUnmatchedClusterPieces.size();
+ for (Cluster clus : unmatchedClusterPieces) {
+ if (unusedUnmatchedClusterPieces.contains(clus)) {
+ // Try to build a cluster...
+ Set<Cluster> piecesForThisCluster = new HashSet<Cluster>();
+ piecesForThisCluster.add(clus);
+ unusedUnmatchedClusterPieces.remove(clus);
+ List<Cluster> componentsInPreviousTier = new Vector<Cluster>();
+ componentsInPreviousTier.add(clus);
+ for (int iTier=0; iTier<25; iTier++) { // 25 is arbitrary cap in case we get stuck.
+ List<Cluster> componentsInNextTier = new Vector<Cluster>();
+ for (Cluster miniSeed : componentsInPreviousTier) {
+ // Search for links above threshold
+ List<ScoredLink> potentialLinks = m_potentialLinks.get(miniSeed);
+ if (potentialLinks != null) {
+ for (ScoredLink link : potentialLinks) {
+ // Links are evidently sorted by descending score, so we can
+ // stop at the first one below threshold.
+ if (link.score() < threshold) { break ; }
+ Cluster newClus = link.counterpart(miniSeed);
+ if (newClus == null) { throw new AssertionError("Null link!"); }
+ // Ensure that we haven't already added component
+ if (unusedUnmatchedClusterPieces.contains(newClus)) {
+ componentsInNextTier.add(newClus);
+ unusedUnmatchedClusterPieces.remove(newClus);
+ piecesForThisCluster.add(newClus);
+ }
+ }
+ }
+ }
+ componentsInPreviousTier = componentsInNextTier;
+ if (componentsInNextTier.size()==0) { break; }
+ }
+ Cluster newShower = makeCombinedCluster(piecesForThisCluster);
+ neutralClusterCores.add(newShower);
+ clustersOfNeutralClusterCore.put(newShower, piecesForThisCluster);
+ for (Cluster piece : piecesForThisCluster) {
+ neutralClusterCoreOfCluster.put(piece, newShower);
+ }
+ }
+ }
+
+ // There may be some "neutral hadron cluster cores" that are too small to be
+ // real -- their measured energy is below their mass. They will need special treatment.
+ Set<Cluster> lowEnergyNeutralShowers = new HashSet<Cluster>();
+ for (Cluster clus : neutralClusterCores) {
+ double clusterEnergy = energy(clus, allSharedClusters, m_neutralCalib);
+ if (clusterEnergy <= mass_K0) {
+ lowEnergyNeutralShowers.add(clus);
+ }
+ }
+ // If they are entirely in the ECAL then assume they are photons.
+ for (Cluster clus : lowEnergyNeutralShowers) {
+ List<CalorimeterHit> hits = clus.getCalorimeterHits();
+ ListFilter filter = new ListFilter(new HitInECALDecision());
+ List<CalorimeterHit> hitsInEcal = filter.filterList(hits);
+ boolean allHitsInEcal = (hitsInEcal.size() == hits.size());
+ if (allHitsInEcal) {
+ extraClustersToTreatAsPhotons.add(clus);
+ } else {
+ // Not entirely in ECAL. For now, just add it to whatever non-tiny neutral
+ // cluster is nearest. (Not ideal, but not crazy.)
+ Cluster nearestCluster = null;
+ double nearestDistance = 0;
+ for (Cluster otherClus : neutralClusterCores) {
+ if (lowEnergyNeutralShowers.contains(otherClus)) {
+ // Don't attach to another teeny shower
+ continue;
+ } else {
+ double dist = proximity(clus, otherClus);
+ if (nearestCluster==null || dist<nearestDistance) {
+ nearestCluster = otherClus;
+ nearestDistance = dist;
+ }
+ }
+ }
+ if (nearestCluster != null) {
+ // Migrate to the other cluster
+ // (safe: we are iterating lowEnergyNeutralShowers here, not
+ // neutralClusterCores, so this remove is not a concurrent modification)
+ ((BasicCluster)(nearestCluster)).addCluster(clus);
+ neutralClusterCores.remove(clus);
+ } else {
+ // Couldn't attach it to anything
+ double clusterEnergy = energy(clus, allSharedClusters, m_neutralCalib);
+ System.out.println("WARNING: Unable to attach teeny neutral hadron cluster with E="+clusterEnergy+" to a larger shower. It will have unphysical energy.");
+ }
+ }
+ }
+
+ // Build particles
+ // NOTE(review): neutralClusterCoresUsed is populated but never read here.
+ List<Cluster> neutralClusterCoresUsed = new Vector<Cluster>();
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ for (Cluster clus : neutralClusterCores) {
+ if (extraClustersToTreatAsPhotons.contains(clus)) {
+ // Skip this (already used as a photon)
+ continue;
+ }
+ // Write out neutral hadron
+ BaseReconstructedParticle part = makeNeutralHadron(clus, allSharedClusters);
+ neutralClusterCoresUsed.add(clus);
+ // Add to the output list
+ outputParticleList.add(part);
+ }
+
+ // Rescued/ECAL-only clusters become photons alongside the hadrons.
+ List<ReconstructedParticle> outputPhotons = makePhotons(extraClustersToTreatAsPhotons, newMapShowerComponentToTrack, newMapShowerComponentToJet);
+ outputParticleList.addAll(outputPhotons);
+
+ return outputParticleList;
+ }
+
+ /*
+
void makeParticlesAndWriteOut(
List<Track> trackList, List<Track> tracksSortedByMomentum, List<Track> unmatchedTracksThatDontReachCalorimeter,
Map<Track, Track> mapOrigTrackToTweakedTrack,
@@ -3635,117 +3717,116 @@
// for non-jet tracks. For tracks in jets, need to be clever to make sure
// each hit appears once and once only.
// FIXME: This is now screwed up for MultipleTrackTracks.
-/*
- Set<Track> jet = mapTrackToJet.get(tr);
- boolean trackInJet = m_clusterAsJets && jet != null;
- if (!trackInJet) {
- outputParticleListForConfusionMatrix_singleTracks.addAll(particles);
- // Make copy with cluster energy...
- BaseReconstructedParticle part2 = new BaseReconstructedParticle();
- BaseReconstructedParticle part3 = new BaseReconstructedParticle();
- if (sharedHitClus.getCalorimeterHits().size()>0) { part2.addCluster(sharedHitClus); }
- if (sharedHitClus.getCalorimeterHits().size()>0) { part3.addCluster(sharedHitClus); }
- part2.addTrack(tr);
- part3.addTrack(tr);
- part2.setCharge(tr.getCharge());
- part3.setCharge(tr.getCharge());
- part2.setMass(mass_piplus);
- part3.setMass(mass_piplus);
- part2.setReferencePoint(new BasicHep3Vector(tr.getReferencePoint()));
- part3.setReferencePoint(new BasicHep3Vector(tr.getReferencePoint()));
- part2.addParticleID(pid_piplus);
- part3.addParticleID(pid_piplus);
- part2.setParticleIdUsed(pid_piplus);
- part3.setParticleIdUsed(pid_piplus);
- double clusterEnergy = energy(showerComponents, allSharedClusters);
- double clusterMomentumMagSq = clusterEnergy*clusterEnergy;
- if (clusterMomentumMagSq<0.0) { clusterMomentumMagSq = 0.0; }
- Hep3Vector calPos = new BasicHep3Vector(sharedHitClus.getPosition());
- Hep3Vector calThreeMomentum = VecOp.mult(Math.sqrt(clusterMomentumMagSq), VecOp.unit(calPos)); // assume it comes from the IP
- HepLorentzVector calFourMomentum = new BasicHepLorentzVector(clusterEnergy, calThreeMomentum);
- part2.set4Vector(calFourMomentum);
- outputChargedParticleListWithClusterEnergy.add(part2);
- // Use old calibration
- DetailedNeutralHadronClusterEnergyCalculator ronNeutralHadronCalib = new DetailedNeutralHadronClusterEnergyCalculator();
- double clusterEnergyOld = energy(showerComponents, allSharedClusters, m_neutralCalib);
- Hep3Vector calThreeMomentumOld = VecOp.mult(clusterEnergyOld, VecOp.unit(calPos));
- HepLorentzVector calFourMomentumOld = new BasicHepLorentzVector(clusterEnergyOld, calThreeMomentumOld);
- part3.set4Vector(calFourMomentumOld);
- outputChargedParticleListWithClusterEnergy_oldCalib.add(part3);
- if (punchThroughTracks.contains(tr)) {
- outputChargedParticleListWithClusterEnergy_punchThrough.add(part2);
- outputChargedParticleListWithClusterEnergy_punchThrough_oldCalib.add(part3);
- //System.out.println("DEBUG: Recorded track with punch-through");
- } else {
- outputChargedParticleListWithClusterEnergy_noPunchThrough.add(part2);
- outputChargedParticleListWithClusterEnergy_noPunchThrough_oldCalib.add(part3);
- //System.out.println("DEBUG: Recorded track with no punch-through");
- }
- } else {
- // Whether or not we write clusters to this track,
- // most of the ReconstructedParticle is the same:
- BaseReconstructedParticle partForMatrix = new BaseReconstructedParticle();
- partForMatrix.addTrack(tr);
- partForMatrix.setCharge(tr.getCharge());
- partForMatrix.set4Vector(fourMomentum);
- partForMatrix.setMass(mass_piplus);
- partForMatrix.setReferencePoint(new BasicHep3Vector(tr.getReferencePoint()));
- if (isElectron) {
- partForMatrix.addParticleID(pid_electron);
- partForMatrix.setParticleIdUsed(pid_electron);
- } else {
- partForMatrix.addParticleID(pid_piplus);
- partForMatrix.setParticleIdUsed(pid_piplus);
- }
- // Now figure out whether we need to add clusters:
- boolean writeClustersToThisTrack = (trackToUseForEachJetForConfusionMatrix.get(jet) == tr);
- if (writeClustersToThisTrack) {
- // Yes -- add all hits in jet
- Set<Cluster> jetShowerComponents = newMapJetToShowerComponents.get(jet);
- for (Cluster clus : jetShowerComponents) { partForMatrix.addCluster(clus); }
- Cluster jetSharedHitClus = makeClusterOfSharedHits(jetShowerComponents, allSharedClusters);
- if (jetSharedHitClus.getCalorimeterHits().size()>0) { partForMatrix.addCluster(jetSharedHitClus); }
- outputParticleListForConfusionMatrix_jetTracksWithClusters.add(partForMatrix);
- // Make copy with cluster energy...
- BaseReconstructedParticle part2 = new BaseReconstructedParticle();
- BaseReconstructedParticle part3 = new BaseReconstructedParticle();
- if (jetSharedHitClus.getCalorimeterHits().size()>0) { part2.addCluster(jetSharedHitClus); }
- if (jetSharedHitClus.getCalorimeterHits().size()>0) { part3.addCluster(jetSharedHitClus); }
- int charge = 0;
- for (Track jetTr : jet) { part2.addTrack(jetTr); charge += jetTr.getCharge(); }
- part2.setCharge(charge);
- part3.setCharge(charge);
- part2.setReferencePoint(new BasicHep3Vector(0,0,0));
- part3.setReferencePoint(new BasicHep3Vector(0,0,0));
- double clusterEnergy = energy(jetShowerComponents, allSharedClusters);
- Hep3Vector calPos = new BasicHep3Vector(jetSharedHitClus.getPosition());
- Hep3Vector calThreeMomentum = VecOp.mult(Math.sqrt(clusterEnergy), VecOp.unit(calPos)); // assume it comes from the IP
- HepLorentzVector calFourMomentum = new BasicHepLorentzVector(clusterEnergy, calThreeMomentum);
- part2.set4Vector(calFourMomentum);
- outputChargedParticleListWithClusterEnergy.add(part2);
- // Use old calibration
- DetailedNeutralHadronClusterEnergyCalculator ronNeutralHadronCalib = new DetailedNeutralHadronClusterEnergyCalculator();
- double clusterEnergyOld = energy(jetShowerComponents, allSharedClusters, m_neutralCalib);
- Hep3Vector calThreeMomentumOld = VecOp.mult(clusterEnergyOld, VecOp.unit(calPos));
- HepLorentzVector calFourMomentumOld = new BasicHepLorentzVector(clusterEnergyOld, calThreeMomentumOld);
- part3.set4Vector(calFourMomentumOld);
- outputChargedParticleListWithClusterEnergy_oldCalib.add(part3);
- if (punchThroughJets.contains(jet)) {
- outputChargedParticleListWithClusterEnergy_punchThrough.add(part2);
- outputChargedParticleListWithClusterEnergy_punchThrough_oldCalib.add(part3);
- //System.out.println("DEBUG: Recorded jet with punch-through");
- } else {
- outputChargedParticleListWithClusterEnergy_noPunchThrough.add(part2);
- outputChargedParticleListWithClusterEnergy_noPunchThrough_oldCalib.add(part3);
- //System.out.println("DEBUG: Recorded jet with no punch-through");
- }
- } else {
- // No -- add an empty cluster
[truncated at 1000 lines; 207 more skipped]