lcsim/src/org/lcsim/recon/pfa/structural
diff -u -r1.17 -r1.18
--- ReclusterDTreeDriver.java 16 Oct 2008 22:40:47 -0000 1.17
+++ ReclusterDTreeDriver.java 22 Oct 2008 17:55:16 -0000 1.18
@@ -35,18 +35,21 @@
* in this package, which uses the implementation in
* org.lcsim.recon.cluster.directedtree developed by NIU).
*
- * @version $Id: ReclusterDTreeDriver.java,v 1.17 2008/10/16 22:40:47 mcharles Exp $
+ * @version $Id: ReclusterDTreeDriver.java,v 1.18 2008/10/22 17:55:16 mcharles Exp $
 * @author Mat Charles
*/
public class ReclusterDTreeDriver extends ReclusterDriver {
protected String m_dTreeClusterListName;
+ protected String m_muonTrackClusterMapName;
protected boolean m_cheatOnPhotonsMisidentifiedAsNeutralHadrons = false;
protected boolean m_cheatOnHadronsMisidentifiedAsPhotons = false;
protected boolean m_cheatOnPhotons = false;
+ protected boolean m_muonDebug = true;
+ protected boolean m_electronDebug = false;
protected boolean m_photonDebug = false;
protected boolean m_photonSplitDebug = false;
protected boolean m_jetDebug = false;
@@ -56,7 +59,7 @@
protected boolean m_oldMipFinderCrossesTrees = true;
protected boolean m_useNewMipFinder = true;
protected boolean m_useOldMipFinder = true;
- protected boolean m_removePoorQualityMips = false;
+ protected boolean m_removePoorQualityMips = false; // DANGER: STILL USED
protected boolean m_clusterAsJets = true;
protected boolean m_ignorePunchThroughTracksForJets = true;
protected boolean m_useTracksThatDontReachCalorimeter = true;
@@ -65,19 +68,19 @@
protected boolean m_allowElectrons = true;
protected boolean m_allowPhotonSeeds = true;
- protected boolean m_splitPhotonSeeds = true;
protected boolean m_allPhotonsAreValidSeeds = true;
-
+ protected boolean m_checkForPhotonTrackOverlap = false;
+
protected boolean m_allowNeutralCalibForEoverP = false;
- protected int m_minHitsToBeTreatedAsClusterECAL = 15;
- protected int m_minHitsToBeTreatedAsClusterHCAL = 20;
- protected int m_minHitsToBeTreatedAsClusterMCAL = 5;
- protected int m_minHitsToBeTreatedAsClusterFCAL = m_minHitsToBeTreatedAsClusterECAL;
- protected double m_newMipFinderRadiusECAL = 20.0;
- protected double m_newMipFinderRadiusHCAL = 50.0;
- protected double m_newMipFinderRadiusMCAL = 100.0;
- protected double m_newMipFinderRadiusFCAL = m_newMipFinderRadiusECAL;
+ //protected int m_minHitsToBeTreatedAsClusterECAL = 15;
+ //protected int m_minHitsToBeTreatedAsClusterHCAL = 20;
+ //protected int m_minHitsToBeTreatedAsClusterMCAL = 5;
+ //protected int m_minHitsToBeTreatedAsClusterFCAL = m_minHitsToBeTreatedAsClusterECAL;
+ protected double m_newMipFinderRadiusECAL = 20.0; // DANGER: STILL USED
+ protected double m_newMipFinderRadiusHCAL = 50.0; // DANGER: STILL USED
+ //protected double m_newMipFinderRadiusMCAL = 100.0;
+ //protected double m_newMipFinderRadiusFCAL = m_newMipFinderRadiusECAL;
protected boolean m_allowSharingOfIsolatedHits = true;
protected double m_jetScoreThreshold = 0.7; // don't hard-code
@@ -94,6 +97,17 @@
protected boolean m_useMucalEndcap = true;
protected boolean m_useFcal = false;
+ protected List<String> m_inputMips = new Vector<String>();
+ protected List<String> m_inputClumps = new Vector<String>();
+ protected List<String> m_inputBlocks = new Vector<String>();
+ protected List<String> m_inputLeftoverHits = new Vector<String>();
+ protected List<String> m_inputTrackClusterMaps = new Vector<String>();
+ public void addInputMips(String name) { m_inputMips.add(name); }
+ public void addInputClumps(String name) { m_inputClumps.add(name); }
+ public void addInputBlocks(String name) { m_inputBlocks.add(name); }
+ public void addInputLeftoverHits(String name) { m_inputLeftoverHits.add(name); }
+ public void addTrackToClusterMap(String name) { m_inputTrackClusterMaps.add(name); }
+
public void writeExtraEventOutput(boolean writeExtra) {
m_writeExtraEventOutput = writeExtra;
}
@@ -102,9 +116,9 @@
super.suspend();
}
- public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList) {
+ public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList, String muonTrackClusterMap, HelixExtrapolator findCluster) {
System.out.println("ReclusterDTreeDriver version 0.5");
- initTrackMatch();
+ initTrackMatch(findCluster);
initCalibration();
initPlots();
m_dTreeClusterListName = dTreeClusterList;
@@ -112,64 +126,7 @@
m_mcList = mcList;
m_eval = new LikelihoodEvaluatorWrapper();
m_outputParticleListName = "DTreeReclusteredParticles";
-
- // Look for hits near boundaries:
- HitNearBarrelEndcapBoundaryDecision dec = new HitNearBarrelEndcapBoundaryDecision(6.0, 15.0, 1);
- add(dec);
- add(new ListFilterDriver(dec, "EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
- add(new ListFilterDriver(dec, "HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
- add(new ListFilterDriver(dec, "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
- add(new ListFilterDriver(dec, "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
- add(new TransientFlagDriver("EcalBarrDigiHitsNearBoundary"));
- add(new TransientFlagDriver("HcalBarrDigiHitsNearBoundary"));
- add(new TransientFlagDriver("EcalEndcapDigiHitsNearBoundary"));
- add(new TransientFlagDriver("HcalEndcapDigiHitsNearBoundary"));
-
- // Set up driver to look for structure inside clusters:
- FindSubClusters clusDriverECAL = new FindSubClusters("DTreeClustersECAL", m_newMipFinderRadiusECAL, m_minHitsToBeTreatedAsClusterECAL, m_removePoorQualityMips, "OldMipsInsideTreesECAL", "NewMipsInsideTreesECAL", "ClumpsInsideTreesECAL", "BlocksInsideTreesECAL", "LeftoverHitsInsideTreesECAL", "MapTreeToTargetsECAL", "MapSharedToTreeECAL");
- FindSubClusters clusDriverHCAL = new FindSubClusters("DTreeClustersHCAL", m_newMipFinderRadiusHCAL, m_minHitsToBeTreatedAsClusterHCAL, m_removePoorQualityMips, "OldMipsInsideTreesHCAL", "NewMipsInsideTreesHCAL", "ClumpsInsideTreesHCAL", "BlocksInsideTreesHCAL", "LeftoverHitsInsideTreesHCAL", "MapTreeToTargetsHCAL", "MapSharedToTreeHCAL");
- FindSubClusters clusDriverMCAL = new FindSubClusters("DTreeClustersMCAL", m_newMipFinderRadiusMCAL, m_minHitsToBeTreatedAsClusterMCAL, m_removePoorQualityMips, "OldMipsInsideTreesMCAL", "NewMipsInsideTreesMCAL", "ClumpsInsideTreesMCAL", "BlocksInsideTreesMCAL", "LeftoverHitsInsideTreesMCAL", "MapTreeToTargetsMCAL", "MapSharedToTreeMCAL");
- FindSubClusters clusDriverFCAL = new FindSubClusters("DTreeClustersFCAL", m_newMipFinderRadiusFCAL, m_minHitsToBeTreatedAsClusterFCAL, m_removePoorQualityMips, "OldMipsInsideTreesFCAL", "NewMipsInsideTreesFCAL", "ClumpsInsideTreesFCAL", "BlocksInsideTreesFCAL", "LeftoverHitsInsideTreesFCAL", "MapTreeToTargetsFCAL", "MapSharedToTreeFCAL");
- if (m_oldMipFinderCrossesTrees) {
- clusDriverECAL.enableBarrelEndcapCrossing("EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary");
- clusDriverHCAL.enableBarrelEndcapCrossing("HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary");
- }
- if (m_findExtraNNClusters) {
- clusDriverECAL.setNNrange(1,1,1);
- clusDriverHCAL.setNNrange(2,2,1);
- clusDriverMCAL.setNNrange(2,2,1);
- clusDriverFCAL.setNNrange(1,1,1);
- }
- add(clusDriverECAL);
- add(clusDriverHCAL);
- // Avoid too much output:
- add(new TransientFlagDriver("OldMipsInsideTreesECAL"));
- add(new TransientFlagDriver("NewMipsInsideTreesECAL"));
- add(new TransientFlagDriver("ClumpsInsideTreesECAL"));
- add(new TransientFlagDriver("BlocksInsideTreesECAL"));
- add(new TransientFlagDriver("LeftoverHitsInsideTreesECAL"));
- add(new TransientFlagDriver("OldMipsInsideTreesHCAL"));
- add(new TransientFlagDriver("NewMipsInsideTreesHCAL"));
- add(new TransientFlagDriver("ClumpsInsideTreesHCAL"));
- add(new TransientFlagDriver("BlocksInsideTreesHCAL"));
- add(new TransientFlagDriver("LeftoverHitsInsideTreesHCAL"));
- // Write out mucal, fcal if needed:
- if (m_useMucalBarrel || m_useMucalEndcap) {
- add(clusDriverMCAL);
- add(new TransientFlagDriver("OldMipsInsideTreesMCAL"));
- add(new TransientFlagDriver("NewMipsInsideTreesMCAL"));
- add(new TransientFlagDriver("ClumpsInsideTreesMCAL"));
- add(new TransientFlagDriver("BlocksInsideTreesMCAL"));
- add(new TransientFlagDriver("LeftoverHitsInsideTreesMCAL"));
- }
- if (m_useFcal) {
- add(clusDriverFCAL);
- add(new TransientFlagDriver("OldMipsInsideTreesFCAL"));
- add(new TransientFlagDriver("NewMipsInsideTreesFCAL"));
- add(new TransientFlagDriver("ClumpsInsideTreesFCAL"));
- add(new TransientFlagDriver("BlocksInsideTreesFCAL"));
- add(new TransientFlagDriver("LeftoverHitsInsideTreesFCAL"));
- }
+ m_muonTrackClusterMapName = muonTrackClusterMap;
}
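
With this revision the constructor no longer schedules the barrel/endcap boundary filters, the per-subdetector FindSubClusters drivers, or the TransientFlagDriver book-keeping itself; those are expected to run upstream, and their output collections are handed to the driver through the addInput* accessors declared above. A minimal wiring sketch, assuming the upstream drivers still publish the collection names the old constructor hard-coded; the list names in the constructor call, the muon map name, and the helper method are illustrative, not part of this commit:

    // Sketch only: wiring the driver now that sub-cluster finding runs
    // upstream. Collection names mirror those used by the removed
    // constructor code; the constructor arguments and
    // "PhotonTrackClusterMap" are placeholders.
    ReclusterDTreeDriver makeDriver(HelixExtrapolator findCluster) {
        ReclusterDTreeDriver driver = new ReclusterDTreeDriver(
                "DTreeClusters", "Tracks", "MCParticle",
                "MuonTrackClusterMap", findCluster);
        driver.addInputMips("OldMipsInsideTreesECAL");   // note the FIXME in
        driver.addInputMips("NewMipsInsideTreesECAL");   // process(): old and new
        driver.addInputMips("OldMipsInsideTreesHCAL");   // mips land in one list
        driver.addInputMips("NewMipsInsideTreesHCAL");
        driver.addInputClumps("ClumpsInsideTreesECAL");
        driver.addInputClumps("ClumpsInsideTreesHCAL");
        driver.addInputBlocks("BlocksInsideTreesECAL");
        driver.addInputBlocks("BlocksInsideTreesHCAL");
        driver.addInputLeftoverHits("LeftoverHitsInsideTreesECAL");
        driver.addInputLeftoverHits("LeftoverHitsInsideTreesHCAL");
        driver.addTrackToClusterMap("PhotonTrackClusterMap"); // placeholder
        return driver;
    }
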
public void process(EventHeader event) {
@@ -179,12 +136,6 @@
// Some likelihood selectors need per-event info
supplyEventToLikelihoodSelectors();
- // Steve's pre-shower MIP-finder
- {
- SteveMipWrapper tmpWrapper = new SteveMipWrapper();
- tmpWrapper.process(event);
- }
-
if (m_oldMipFinderCrossesTrees) {
m_allowComponentsToStraddleLargeClusters = true;
}
@@ -205,10 +156,10 @@
List<CalorimeterHit> allHitsMcalEndcap = null;
List<CalorimeterHit> allHitsFcalEndcap = null;
if (m_useMucalBarrel) {
- allHitsMcalBarrel = m_event.get(CalorimeterHit.class, "MuonBarrDigiHits");
+ allHitsMcalBarrel = m_event.get(CalorimeterHit.class, "CorrMuonBarrDigiHits");
}
if (m_useMucalEndcap) {
- allHitsMcalEndcap = m_event.get(CalorimeterHit.class, "MuonEndcapDigiHits");
+ allHitsMcalEndcap = m_event.get(CalorimeterHit.class, "CorrMuonEndcapDigiHits");
}
if (m_useFcal) {
allHitsFcalEndcap = m_event.get(CalorimeterHit.class, "ForwardEcalEndcapDigiHits");
@@ -228,6 +179,7 @@
if (m_useFcal) {
allHits.addAll(allHitsFcalEndcap);
}
+ HitMap allHitsMap = new HitMap(allHits);
for (Cluster photon : photons) {
if (photon.getCalorimeterHits().contains(null)) {
@@ -242,242 +194,127 @@
List<Cluster> clumps = new Vector<Cluster>();
List<Cluster> leftoverHitClusters = new Vector<Cluster>();
List<Cluster> treesWithNoStructure = new Vector<Cluster>();
- mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesECAL"));
- mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesHCAL"));
- mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesECAL"));
- mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesHCAL"));
- clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesECAL"));
- clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesHCAL"));
- leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesECAL"));
- leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesHCAL"));
- treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesECAL"));
- treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesHCAL"));
- if (m_useMucalBarrel || m_useMucalEndcap) {
- mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesMCAL"));
- mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesMCAL"));
- clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesMCAL"));
- leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesMCAL"));
- treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesMCAL"));
+
+ for (String name : m_inputMips) {
+ mipsOld.addAll(event.get(Cluster.class, name)); // FIXME: OLD VS NEW MIPS?
}
- if (m_useFcal) {
- mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesFCAL"));
- mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesFCAL"));
- clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesFCAL"));
- leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesFCAL"));
- treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesFCAL"));
+ for (String name : m_inputClumps) {
+ clumps.addAll(event.get(Cluster.class, name));
+ }
+ for (String name : m_inputBlocks) {
+ treesWithNoStructure.addAll(event.get(Cluster.class, name));
}
+ for (String name : m_inputLeftoverHits) {
+ leftoverHitClusters.addAll(event.get(Cluster.class, name));
+ }
+
List<Cluster> mips = new Vector<Cluster>();
mips.addAll(mipsOld);
mips.addAll(mipsNew);
- if (m_debug) {
- System.out.println("Found "+mips.size()+" mips, "+clumps.size()+" clumps, "+photons.size()+" photons, "+leftoverHitClusters.size()+" leftover-hit-clusters, "+treesWithNoStructure.size()+" large DTrees with no structure, and "+trackList.size()+" tracks in event.");
- }
- // Also load book-keeping maps that record what subclusters or hits
- // belong to what tree:
- Map<Cluster, List<Cluster>> targetsInTreeECAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsECAL")));
- Map<Cluster, Cluster> treeOfSharedClusterECAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeECAL")));
- Map<Cluster, List<Cluster>> targetsInTreeHCAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsHCAL")));
- Map<Cluster, Cluster> treeOfSharedClusterHCAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeHCAL")));
- Map<Cluster, List<Cluster>> targetsInTreeMCAL = null;
- Map<Cluster, Cluster> treeOfSharedClusterMCAL = null;
- Map<Cluster, List<Cluster>> targetsInTreeFCAL = null;
- Map<Cluster, Cluster> treeOfSharedClusterFCAL = null;
- if (m_useMucalBarrel || m_useMucalEndcap) {
- targetsInTreeMCAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsMCAL")));
- treeOfSharedClusterMCAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeMCAL")));
- }
- if (m_useFcal) {
- targetsInTreeFCAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsFCAL")));
- treeOfSharedClusterFCAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeFCAL")));
+ // TEST
+ boolean vetoThenRetainPhotons = true;
+ List<Cluster> vetoedPhotons = new Vector<Cluster>();
+ if (vetoThenRetainPhotons) {
+ vetoedPhotons = event.get(Cluster.class, "VetoedPhotonClustersMinusMuonHitsAndMipHits");
+ for (Cluster clus : vetoedPhotons) {
+ if (clus.getCalorimeterHits().size()>5) {
+ clumps.add(clus);
+ } else {
+ treesWithNoStructure.add(clus);
+ }
+ }
}
+
+ // Also make book-keeping maps that record what subclusters or hits
+ // belong to what tree:
Map<Cluster, List<Cluster>> targetsInTree = new HashMap<Cluster, List<Cluster>>();
Map<Cluster, Cluster> treeOfSharedCluster = new HashMap<Cluster,Cluster>();
- targetsInTree.putAll(targetsInTreeECAL);
- targetsInTree.putAll(targetsInTreeHCAL);
- treeOfSharedCluster.putAll(treeOfSharedClusterECAL);
- treeOfSharedCluster.putAll(treeOfSharedClusterHCAL);
- if (m_useMucalBarrel || m_useMucalEndcap) {
- targetsInTree.putAll(targetsInTreeMCAL);
- treeOfSharedCluster.putAll(treeOfSharedClusterMCAL);
- }
- if (m_useFcal) {
- targetsInTree.putAll(targetsInTreeFCAL);
- treeOfSharedCluster.putAll(treeOfSharedClusterFCAL);
+ {
+ Set<Cluster> targets = new HashSet<Cluster>();
+ targets.addAll(mips);
+ targets.addAll(clumps);
+ targets.addAll(treesWithNoStructure);
+ Map<Long,Cluster> mapCellToTarget = new HashMap<Long,Cluster>();
+ Map<Long,Cluster> mapCellToLeftover = new HashMap<Long,Cluster>();
+ for (Cluster target : targets) {
+ for (CalorimeterHit hit : target.getCalorimeterHits()) {
+ long id = hit.getCellID();
+ Cluster test = mapCellToTarget.get(id);
+ if (test != null) { throw new AssertionError("Book-keeping error"); }
+ mapCellToTarget.put(id, target);
+ }
+ }
+ for (Cluster clus : leftoverHitClusters) {
+ for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+ long id = hit.getCellID();
+ Cluster test1 = mapCellToTarget.get(id);
+ Cluster test2 = mapCellToLeftover.get(id);
+ if (test1 != null) { throw new AssertionError("Book-keeping error"); }
+ if (test2 != null) { throw new AssertionError("Book-keeping error"); }
+ mapCellToLeftover.put(id, clus);
+ }
+ }
+ for (Cluster tree : dTreeClusters) {
+ Set<Cluster> matchedTargets = new HashSet<Cluster>();
+ for (CalorimeterHit hit : tree.getCalorimeterHits()) {
+ long id = hit.getCellID();
+ Cluster target = mapCellToTarget.get(id);
+ if (target != null) {
+ matchedTargets.add(target);
+ }
+ Cluster shared = mapCellToLeftover.get(id);
+ if (shared != null) {
+ Cluster test = treeOfSharedCluster.get(shared);
+ if (test != null && test != tree) { throw new AssertionError("Book-keeping error: Shared/leftover cluster with "+shared.getCalorimeterHits().size()+" hits was found inside a tree of "+tree.getCalorimeterHits().size()+" hits but had already been seen inside a tree of "+test.getCalorimeterHits().size()+" hits."); }
+ treeOfSharedCluster.put(shared, tree);
+ }
+ }
+ List<Cluster> matchedTargetsList = new Vector<Cluster>();
+ matchedTargetsList.addAll(matchedTargets);
+ targetsInTree.put(tree, matchedTargetsList);
+ }
}
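
The per-subdetector book-keeping maps that FindSubClusters used to publish (MapTreeToTargetsECAL and friends) are gone; the block above rebuilds targetsInTree and treeOfSharedCluster locally by indexing every hit's cell ID, so the maps stay consistent regardless of which upstream drivers produced the inputs. The indexing idea in isolation, as a compilable stand-alone sketch using plain lists of cell IDs in place of the lcsim Cluster/CalorimeterHit types:

    import java.util.*;

    // Stand-in illustration of the cell-ID book-keeping above: clusters are
    // reduced to lists of cell IDs, and targets are identified by list index.
    class CellIndexSketch {
        // Index every cell; each cell may belong to at most one target.
        static Map<Long, Integer> indexCells(List<List<Long>> targets) {
            Map<Long, Integer> cellToTarget = new HashMap<Long, Integer>();
            for (int i = 0; i < targets.size(); i++) {
                for (long id : targets.get(i)) {
                    if (cellToTarget.put(id, i) != null) {
                        throw new AssertionError("Book-keeping error: cell "
                                + id + " appears in two targets");
                    }
                }
            }
            return cellToTarget;
        }

        // A tree "contains" every target it shares at least one cell with.
        static Set<Integer> targetsOfTree(List<Long> treeCells,
                                          Map<Long, Integer> cellToTarget) {
            Set<Integer> matched = new HashSet<Integer>();
            for (long id : treeCells) {
                Integer target = cellToTarget.get(id);
                if (target != null) { matched.add(target); }
            }
            return matched;
        }
    }
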
- // Legacy maps, no longer used.
- Map<Cluster, Cluster> treeOfMip = new HashMap<Cluster, Cluster>();
- Map<Cluster, Cluster> treeOfClump = new HashMap<Cluster,Cluster>();
- Map<Cluster, Cluster> treeOfLeftoverHits = new HashMap<Cluster,Cluster>();
-
- // Identify the start point of showers
- m_findCluster.process(m_event); // picks up geometry
-
// Match tracks
// ------------
Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
- // Handle photons
- List<Cluster> chargedHadronLikePhotons = new Vector<Cluster>();
- List<Cluster> modifiedPhotonClusters = new Vector<Cluster>();
- List<Cluster> photonLikePhotons = new Vector<Cluster>();
- List<Cluster> electronClusters = new Vector<Cluster>();
- List<Track> electronTracks = new Vector<Track>();
- photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, trackList, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
-
- // Resume track matching
- List<Cluster> allMatchableClusters = new Vector<Cluster>();
- allMatchableClusters.addAll(mips);
- allMatchableClusters.addAll(clumps);
- if (m_allowPhotonSeeds) {
- allMatchableClusters.addAll(chargedHadronLikePhotons);
- }
- allMatchableClusters.addAll(leftoverHitClusters); // DANGER -- leftoverHitClusters can be large!
- allMatchableClusters.addAll(treesWithNoStructure);
- if (allMatchableClusters.contains(null)) { throw new AssertionError("Book-keeping error: null cluster in list allMatchableClusters"); }
-
- if (m_debug) { System.out.println("Attempting to match "+allMatchableClusters.size()+" matchable clusters to "+trackList.size()+" tracks"); }
- for (Track tr : trackList) {
- if (electronTracks.contains(tr)) {
- continue; // Those are already assigned!
- }
- Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, allMatchableClusters);
- if (matchedCluster != null) {
- if (leftoverHitClusters.contains(matchedCluster)) {
- if (m_debugSeedSplitting) {
- // Debug printout
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Hep3Vector interceptPoint = null;
- if (result != null) {
- interceptPoint = result.getInterceptPoint();
- }
- if (interceptPoint != null) {
- double primaryDist = proximity(matchedCluster, interceptPoint);
- int innermostLayerOfMatchedCluster = 99;
- for (CalorimeterHit hit : matchedCluster.getCalorimeterHits()) {
- int layer = getLayer(hit);
- if (layer < innermostLayerOfMatchedCluster) { innermostLayerOfMatchedCluster = layer; }
- }
- System.out.println("DEBUG: Matched track with p="+momentum(tr).magnitude()+" to leftoverHitCluster with "+matchedCluster.getCalorimeterHits().size()+" hits at proximity="+primaryDist+" with innermost layer "+innermostLayerOfMatchedCluster);
- }
- Map<MCParticle, List<SimCalorimeterHit>> truthInfo = truthFromCluster(matchedCluster);
- for (MCParticle part : truthInfo.keySet()) {
- List<SimCalorimeterHit> truthHits = truthInfo.get(part);
- System.out.println("DEBUG: -> contribution of "+truthHits.size()+" hits from particle with p="+part.getMomentum().magnitude());
- }
- }
- Cluster tree = treeOfSharedCluster.get(matchedCluster);
- if (tree != null) {
- List<Cluster> targets = targetsInTree.get(tree);
- if (m_debugSeedSplitting) {
- // Debug printout
- for (Cluster target : targets) {
- String idname = new String("UNKNOWN");
- if (mipsOld.contains(target)) {
- idname = new String("MipOld");
- } else if (mipsNew.contains(target)) {
- idname = new String("MipNew");
- } else if (clumps.contains(target)) {
- idname = new String("Clump");
- }
- MCParticle targetTruth = quoteDominantParticle(target);
- double targetTruthMom = targetTruth.getMomentum().magnitude();
- System.out.println("DEBUG: -> structure found: "+idname+" with "+target.getCalorimeterHits().size()+" hits from particle with p="+targetTruthMom);
- }
- }
- if (targets.size()>0) {
- Cluster rematchedSubCluster = m_trackClusterMatcher.matchTrackToCluster(tr, targets);
- if (rematchedSubCluster != null) {
- matchedCluster = rematchedSubCluster; // CHANGES BEHAVIOUR!
- if (m_debugSeedSplitting) {
- // Debug printout
- String idname = new String("UNKNOWN");
- if (mipsOld.contains(rematchedSubCluster)) {
- idname = new String("MipOld");
- } else if (mipsNew.contains(rematchedSubCluster)) {
- idname = new String("MipNew");
- } else if (clumps.contains(rematchedSubCluster)) {
- idname = new String("Clump");
- }
- MCParticle targetTruth = quoteDominantParticle(rematchedSubCluster);
- double targetTruthMom = targetTruth.getMomentum().magnitude();
- if (rematchedSubCluster != null) {
- System.out.println("DEBUG: -> Rematch to: "+idname+" with "+rematchedSubCluster.getCalorimeterHits().size()+" hits from particle with p="+targetTruthMom);
- }
- }
- } else {
- if (m_debugSeedSplitting) {
- // Debug printout
- System.out.println("DEBUG: -> Rematch failed (no targets passed track-matching)");
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Hep3Vector interceptPoint = null;
- if (result != null) {
- interceptPoint = result.getInterceptPoint();
- }
- if (interceptPoint != null) {
- for (Cluster target : targets) {
- double dist = proximity(target, interceptPoint);
- int innermostLayerOfTarget = 99;
- for (CalorimeterHit hit : target.getCalorimeterHits()) {
- int layer = getLayer(hit);
- if (layer < innermostLayerOfTarget) { innermostLayerOfTarget = layer; }
- }
- System.out.println("DEBUG: * Target with "+target.getCalorimeterHits().size()+" hits at dist="+dist+" with innermost layer="+innermostLayerOfTarget);
- }
- }
- }
- }
- }
- }
- } else if (treesWithNoStructure.contains(matchedCluster)) {
- if (m_debugSeedSplitting) {
- System.out.println("DEBUG: Matched track with p="+momentum(tr).magnitude()+" to block with "+matchedCluster.getCalorimeterHits().size()+" hits.");
- }
- }
- tracksMatchedToClusters.put(tr, matchedCluster);
- List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
- if (clusTrList == null) {
- clusTrList = new Vector<Track>();
- clustersMatchedToTracks.put(matchedCluster, clusTrList);
- }
- clusTrList.add(tr);
+ for (String mapName : m_inputTrackClusterMaps) {
+ Map<Track,Cluster> currentMap = (Map<Track,Cluster>)(event.get(mapName));
+ for (Track tr : currentMap.keySet()) {
+ // First, check we don't already have an assignment for this track
+ if (tr == null) { throw new AssertionError("Null track!"); }
+ if (tracksMatchedToClusters.get(tr) != null) { throw new AssertionError("Multiple entries for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()); }
+ // Now do the book-keeping
+ Cluster clus = currentMap.get(tr);
+ if (clus == null) { throw new AssertionError("Null cluster!"); }
+ tracksMatchedToClusters.put(tr, clus);
+ List<Track> tracksOfThisCluster = clustersMatchedToTracks.get(clus);
+ if (tracksOfThisCluster == null) { tracksOfThisCluster = new Vector<Track>(); clustersMatchedToTracks.put(clus, tracksOfThisCluster); }
+ tracksOfThisCluster.add(tr);
}
}
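
Track-cluster matching is externalized in the same way: instead of calling m_trackClusterMatcher against allMatchableClusters, the loop above merges whatever Track-to-Cluster maps were registered via addTrackToClusterMap, asserting that no track is claimed twice, and builds the inverse cluster-to-tracks view as it goes. The same merge-and-invert step written as a generic helper (a sketch, not lcsim code; assumes java.util imports):

    // Combine several forward maps into mergedOut, rejecting duplicate
    // keys, and return the inverse multimap (value -> list of keys).
    static <K, V> Map<V, List<K>> mergeAndInvert(List<Map<K, V>> maps,
                                                 Map<K, V> mergedOut) {
        Map<V, List<K>> inverse = new HashMap<V, List<K>>();
        for (Map<K, V> m : maps) {
            for (Map.Entry<K, V> e : m.entrySet()) {
                if (mergedOut.put(e.getKey(), e.getValue()) != null) {
                    throw new AssertionError("Duplicate key: " + e.getKey());
                }
                List<K> keys = inverse.get(e.getValue());
                if (keys == null) { keys = new Vector<K>(); inverse.put(e.getValue(), keys); }
                keys.add(e.getKey());
            }
        }
        return inverse;
    }
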
- // Optionally, split photon seeds
- List<Cluster> photonFragments = new Vector<Cluster>();
- if (m_splitPhotonSeeds) {
- splitPhotonSeeds(clustersMatchedToTracks, tracksMatchedToClusters, modifiedPhotonClusters, electronClusters, photonLikePhotons, chargedHadronLikePhotons, photonFragments, mipsOld, mipsNew, mips, clumps, treeOfMip, treeOfClump, treeOfLeftoverHits, treesWithNoStructure);
- }
// Unmatched tracks
List<Track> unmatchedTracks = new Vector<Track>();
unmatchedTracks.addAll(trackList);
unmatchedTracks.removeAll(tracksMatchedToClusters.keySet());
List<Track> unmatchedTracksThatDontReachCalorimeter = new Vector<Track>();
for (Track tr : unmatchedTracks) {
- LocalHelixExtrapolationTrackClusterMatcher debugTrackMatch = new LocalHelixExtrapolationTrackClusterMatcher(m_findCluster);
- debugTrackMatch.process(m_event);
- Cluster debugMatchedCluster = debugTrackMatch.matchTrackToCluster(tr, allMatchableClusters);
- if (debugMatchedCluster != null) {
- // This can happen sometimes if it pointed to a photon that was broken up severely.
- // In any case, it clearly pointed to the calorimeter so we shouldn't
- // add on the track momentum (that would be double-counting)
+ HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
+ Hep3Vector interceptPoint = null;
+ if (result != null) {
+ interceptPoint = result.getInterceptPoint();
+ }
+ if (interceptPoint == null) {
+ // No valid extrap to calorimeter
+ unmatchedTracksThatDontReachCalorimeter.add(tr);
} else {
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Hep3Vector interceptPoint = null;
- if (result != null) {
- interceptPoint = result.getInterceptPoint();
- }
- if (interceptPoint == null) {
- // No valid extrap to calorimeter
- unmatchedTracksThatDontReachCalorimeter.add(tr);
- } else {
- // Reached calorimeter, but no match => we'll treat as NH or similar
- }
+ // Reached calorimeter, but no match => we'll treat as NH or similar
}
}
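
With the debug re-match against allMatchableClusters removed, the unmatched-track loop above reduces to a single question: does the helix extrapolation reach the calorimeter at all? Factored into a helper for clarity (the types and calls are the ones already used in this file; the method itself is illustrative):

    // True when the track extrapolates to a valid calorimeter intercept.
    static boolean reachesCalorimeter(HelixExtrapolator extrapolator, Track tr) {
        HelixExtrapolationResult result = extrapolator.performExtrapolation(tr);
        return result != null && result.getInterceptPoint() != null;
    }
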
@@ -488,193 +325,11 @@
}
}
- ShowerPointFinder showerFinder = new ShowerPointFinder(m_findCluster, allHits, tracksMatchedToClusters);
- Map<Track,BasicCluster> MapTrkToMIP = showerFinder.findMips();
- event.put("ShowerFinderMapTrackToMip", MapTrkToMIP);
- List<Cluster> preShowerMips = new Vector<Cluster>();
- preShowerMips.addAll(MapTrkToMIP.values());
- event.put("ShowerFinderMips", preShowerMips);
- event.getMetaData(preShowerMips).setTransient(true);
-
- // Figure out whether tracks were uniquely matched or not:
- Set<Track> uniquelyMatchedTracks = new HashSet<Track>();
- Set<Track> ambiguouslyMatchedTracks = new HashSet<Track>();
- Map<Track, Cluster> tweakedTracksMatchedToClusters = new HashMap<Track,Cluster>();
- Map<Cluster, Track> clustersMatchedToTweakedTracks = new HashMap<Cluster, Track>();
- List<Track> tweakedTracks = new Vector<Track>();
- Map<Track, Track> mapOrigTrackToTweakedTrack = new HashMap<Track,Track>();
- handleTrackMatchingAmbiguities(clustersMatchedToTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracksMatchedToClusters, clustersMatchedToTweakedTracks, tweakedTracks, mapOrigTrackToTweakedTrack);
-
- // Check for cases where seed cluster is just plain too big
- List<Track> flaggedBadTracks = new Vector<Track>();
- for (Track tr : tweakedTracks) {
- if (electronTracks.contains(tr)) {
- continue; // Electron clusters are not too big by construction (E/p ~ 1)
- }
- Cluster seed = tweakedTracksMatchedToClusters.get(tr);
- double seedEnergy = energy(seed);
- double tolerance = 3.0;
- boolean testValidEoverP = testEoverP_oneSided(seedEnergy, tr, tolerance);
- if (!testValidEoverP) {
- // More than 3sigma off -- broken.
- flaggedBadTracks.add(tr);
- }
- }
- for (Track tr : flaggedBadTracks) {
- // Track's seed has E>>p -- split it up.
- // First find the seed using the parent track...
- Cluster seed = tweakedTracksMatchedToClusters.get(tr);
-
- // We'll be redoing the matching, so if we have several tracks
- // bundled into one, we should split them up:
- List<Track> individualTracks = new Vector<Track>();
- if (tr instanceof MultipleTrackTrack) {
- individualTracks.addAll(tr.getTracks());
- } else {
- individualTracks.add(tr);
- }
-
- // Split the seed up
- Map<Track,Cluster> splitOutputMap = new HashMap<Track,Cluster>();
- List<Cluster> splitClusters = splitPhoton(seed, individualTracks, splitOutputMap, 10.0, false);
- if (m_debugSeedSplitting) {
- System.out.println("DEBUG: Split big cluster with "+seed.getCalorimeterHits().size()+" hits into pieces...");
- for (Track tmpTr : splitOutputMap.keySet()) {
- double p = (new BasicHep3Vector(tmpTr.getMomentum())).magnitude();
- Cluster clus = splitOutputMap.get(tmpTr);
- System.out.println(" * Track with p="+p+" mapped to piece with "+clus.getCalorimeterHits().size()+" hits.");
- }
- }
-
- // Remove old listings / book-keeping info
- mipsOld.remove(seed);
- mipsNew.remove(seed);
- mips.remove(seed);
- clumps.remove(seed);
- leftoverHitClusters.remove(seed);
- treesWithNoStructure.remove(seed);
- clustersMatchedToTracks.remove(seed);
- tweakedTracksMatchedToClusters.remove(tr);
- clustersMatchedToTweakedTracks.remove(seed);
- tweakedTracks.remove(tr);
- for (Track dauTr : individualTracks) {
- tracksMatchedToClusters.remove(dauTr);
- uniquelyMatchedTracks.remove(dauTr);
- ambiguouslyMatchedTracks.remove(dauTr);
- mapOrigTrackToTweakedTrack.remove(dauTr);
- }
- List<Cluster> treesContainingOldSeed = new Vector<Cluster>();
- for (Cluster tree : targetsInTree.keySet()) {
- List<Cluster> targets = targetsInTree.get(tree);
- if (targets.contains(seed)) {
- treesContainingOldSeed.add(tree);
- targets.remove(seed);
- }
- }
-
- // Some tracks in the bundle may not have a seed cluster any more. They will be jettisoned.
- // This is rather brutal -- really we should refine the matching somehow (iteratively?)
- // For the rest, see if they're now ambiguous or unique in the matching to newly broken-up seeds
- Map<Cluster, List<Track>> local_clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
- Map<Track,Cluster> local_tracksMatchedToClusters = new HashMap<Track,Cluster>();
- Set<Track> local_uniquelyMatchedTracks = new HashSet<Track>();
- Set<Track> local_ambiguouslyMatchedTracks = new HashSet<Track>();
- Map<Track, Cluster> local_tweakedTracksMatchedToClusters = new HashMap<Track, Cluster>();
- Map<Cluster, Track> local_clustersMatchedToTweakedTracks = new HashMap<Cluster, Track>();
- List<Track> local_tweakedTracks = new Vector<Track>();
- Map<Track, Track> local_mapOrigTrackToTweakedTrack = new HashMap<Track, Track>();
- for (Track dauTr : individualTracks) {
- Cluster newSeed = splitOutputMap.get(dauTr);
- if (newSeed != null) {
- List<Track> tracksMatchedToSeed = local_clustersMatchedToTracks.get(newSeed);
- if (tracksMatchedToSeed == null) { tracksMatchedToSeed = new Vector<Track>(); local_clustersMatchedToTracks.put(newSeed, tracksMatchedToSeed); }
- tracksMatchedToSeed.add(dauTr);
- local_tracksMatchedToClusters.put(dauTr, newSeed);
- }
- }
- handleTrackMatchingAmbiguities(local_clustersMatchedToTracks, local_tracksMatchedToClusters, local_uniquelyMatchedTracks, local_ambiguouslyMatchedTracks, local_tweakedTracksMatchedToClusters, local_clustersMatchedToTweakedTracks, local_tweakedTracks, local_mapOrigTrackToTweakedTrack);
-
- // Book-keeping
- Set<Cluster> newSeeds = local_clustersMatchedToTracks.keySet();
- Set<Cluster> splitOutputMapSeeds = new HashSet<Cluster>();
- splitOutputMapSeeds.addAll(splitOutputMap.values());
-
- // Any piece not matched to a track becomes a photon
- for (Cluster piece : splitClusters) {
- if (!newSeeds.contains(piece)) {
- chargedHadronLikePhotons.add(piece);
- modifiedPhotonClusters.add(piece);
- }
- }
-
- // Any track not matched to anything is junked
- for (Track subTr : individualTracks) {
- if (local_tracksMatchedToClusters.get(subTr) == null) {
- if (m_debugSeedSplitting) { System.out.println("DEBUG: After splitting cluster, couldn't find a match for track -- junking"); }
- unmatchedTracks.add(subTr);
- }
- }
-
- // Now, for each track matched to a seed, re-test E/p. If it passes, keep it.
- for (Track newTweakedTrack : local_tweakedTracks) {
- Cluster newSeed = local_tweakedTracksMatchedToClusters.get(newTweakedTrack);
- if (newSeed == null) { throw new AssertionError("Seed is null, but it shouldn't be -- book-keeping error"); }
- if (!newSeeds.contains(newSeed)) { throw new AssertionError("New seed is not in newSeeds!"); }
- if (unmatchedTracks.contains(newTweakedTrack)) { throw new AssertionError("Book-keeping error!"); }
- double newSeedEnergy = energy(newSeed);
- double tolerance = 3.0;
- boolean testValidEoverP_new = testEoverP_oneSided(newSeedEnergy, newTweakedTrack, tolerance);
-
- if (testValidEoverP_new) {
- // REPLACE -- we managed to break off a piece of acceptable size.
- if (m_debugSeedSplitting) { System.out.println("DEBUG: Will replace old seed of "+seed.getCalorimeterHits().size()+" hits with new seed of "+newSeed.getCalorimeterHits().size()+" hits plus photon pieces"); }
- // Check if this is one track or several bundled together:
- List<Track> individualTracksOfNewTweakedTrack = new Vector<Track>();
- if (newTweakedTrack instanceof MultipleTrackTrack) {
- individualTracksOfNewTweakedTrack.addAll(newTweakedTrack.getTracks());
- ambiguouslyMatchedTracks.addAll(newTweakedTrack.getTracks());
- for (Track subTr : newTweakedTrack.getTracks()) {
- mapOrigTrackToTweakedTrack.put(subTr, newTweakedTrack);
- }
- } else {
- uniquelyMatchedTracks.add(newTweakedTrack);
- individualTracksOfNewTweakedTrack.add(newTweakedTrack);
- mapOrigTrackToTweakedTrack.put(newTweakedTrack, newTweakedTrack);
- }
- // Add new listings
- clumps.add(newSeed);
- clustersMatchedToTracks.put(newSeed, individualTracksOfNewTweakedTrack);
- for (Track subTr : individualTracksOfNewTweakedTrack) {
- tracksMatchedToClusters.put(subTr, newSeed);
- }
- tweakedTracks.add(newTweakedTrack);
- tweakedTracksMatchedToClusters.put(newTweakedTrack, newSeed);
- clustersMatchedToTweakedTracks.put(newSeed, newTweakedTrack);
- for (Cluster tree : treesContainingOldSeed) {
- List<Cluster> targets = targetsInTree.get(tree);
- targets.add(newSeed);
- }
- } else {
- // FAIL ENTIRELY
- if (m_debugSeedSplitting) {
- System.out.println("DEBUG: Unable to replace old seed of "+seed.getCalorimeterHits().size()+" hits with smaller seed. Will turn into photon and remove track from consideration.");
- System.out.println("DEBUG: Was working with "+local_tweakedTracks.size()+" local tweaked tracks from "+individualTracks.size()+" individual tracks");
- if (clustersMatchedToTracks.get(seed) != null) { System.out.println("OLD SEED MATCHED TO NON-NULL TRACK"); }
- }
- // Add new listings as a photon
- chargedHadronLikePhotons.add(newSeed);
- modifiedPhotonClusters.add(newSeed);
- unmatchedTracks.add(newTweakedTrack);
- }
- }
- }
-
// Track seeds
- Set<Cluster> seeds = clustersMatchedToTracks.keySet();
List<Cluster> seedLeftoverHitClusters = new Vector();
List<Cluster> nonSeedLeftoverHitClusters = new Vector();
for (Cluster clus : leftoverHitClusters) {
- if (seeds.contains(clus) ) {
+ if (clustersMatchedToTracks.containsKey(clus)) {
seedLeftoverHitClusters.add(clus);
} else {
nonSeedLeftoverHitClusters.add(clus);
@@ -682,42 +337,26 @@
}
if ( seedLeftoverHitClusters.size() + nonSeedLeftoverHitClusters.size() != leftoverHitClusters.size() ) { throw new AssertionError("Book-keeping error"); }
- List<Cluster> seedElectronClusters = new Vector();
- List<Cluster> seedHadronLikePhotonClusters = new Vector();
- List<Cluster> nonSeedHadronLikePhotonClusters = new Vector();
- List<Cluster> nonSeedPhotonLikePhotonClusters = new Vector();
- for (Cluster clus : electronClusters) {
- if (seeds.contains(clus)) {
- seedElectronClusters.add(clus);
- } else {
- throw new AssertionError("Electron cluster is not a seed!");
- }
- }
- for (Cluster clus : photonLikePhotons) {
- if (seeds.contains(clus)) {
- throw new AssertionError("Photon cluster is a seed!");
- } else {
- nonSeedPhotonLikePhotonClusters.add(clus);
- }
- }
- for (Cluster clus : chargedHadronLikePhotons) {
- if (seeds.contains(clus)) {
- seedHadronLikePhotonClusters.add(clus);
+ List<Cluster> seedPhotonClusters = new Vector<Cluster>();
+ List<Cluster> nonSeedPhotonClusters = new Vector<Cluster>();
+ for (Cluster clus : photons) {
+ if (clustersMatchedToTracks.containsKey(clus)) {
+ seedPhotonClusters.add(clus);
} else {
- nonSeedHadronLikePhotonClusters.add(clus);
+ nonSeedPhotonClusters.add(clus);
}
}
- if (seedElectronClusters.size() + seedHadronLikePhotonClusters.size() + nonSeedHadronLikePhotonClusters.size() + nonSeedPhotonLikePhotonClusters.size() != modifiedPhotonClusters.size() ) { throw new AssertionError("Book-keeping error"); }
// Sort matched tracks by momentum
List<Track> tracksSortedByMomentum = new Vector<Track>();
- for (Track tr : tweakedTracksMatchedToClusters.keySet()) {
+ for (Track tr : tracksMatchedToClusters.keySet()) {
tracksSortedByMomentum.add(tr);
}
Collections.sort(tracksSortedByMomentum, new MomentumSort());
+ Set<Cluster> seeds = clustersMatchedToTracks.keySet();
if (m_debug) {
- debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, uniquelyMatchedTracks, ambiguouslyMatchedTracks, tweakedTracks, seeds, tracksSortedByMomentum, tweakedTracksMatchedToClusters);
+ debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, tracksSortedByMomentum);
}
// Prep for linking
@@ -741,7 +380,7 @@
// Any structure?
Cluster tree = treeOfSharedCluster.get(clus);
List<Cluster> targetsInThisTree = targetsInTree.get(tree);
- if (targetsInThisTree.size() > 0) {
+ if (targetsInThisTree != null && targetsInThisTree.size() > 0) {
// These "leftover hit clusters" can include a LOT of halo hits from
// inside a DTree... need to break them into bite-sized pieces for
// the sharing algorithm
@@ -763,21 +402,22 @@
} else if (hitFCAL && m_useFcal) {
leftoverHitClustersToShareFCAL.add(tmpClus);
} else {
- throw new AssertionError("Unknown subdetector");
+ throw new AssertionError("Unknown subdetector: "+hit.getSubdetector().getName());
}
}
} else {
// No structure
+ // or target list was null because cluster was part of a pre-shower MIP
smallClustersToShare.add(clus); // TEST
}
}
}
- smallClustersToShare.addAll(photonFragments);
+ //smallClustersToShare.addAll(photonFragments);
List<Cluster> linkableClusters = new Vector<Cluster>();
- linkableClusters.addAll(modifiedPhotonClusters);
+ linkableClusters.addAll(photons);
linkableClusters.addAll(linkableClustersExcludingPhotons);
- // Initially, cheat
+ // Set up scoring
resetPotentialLinks();
if (m_cheatOnScoring) {
initPotentialLinks_cheating(linkableClusters, clustersMatchedToTracks);
@@ -796,8 +436,8 @@
initPotentialLinks_MipClump(mipsNew, clumps, scaleFactorTrackToClump*penaltyForNewMips, true);
initPotentialLinks_MipMisc(mipsOld, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
initPotentialLinks_MipMisc(mipsNew, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
- initPotentialLinks_MipMisc(mipsOld, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
- initPotentialLinks_MipMisc(mipsNew, seedHadronLikePhotonClusters, thresholdForProximity, "Photon");
+ initPotentialLinks_MipMisc(mipsOld, seedPhotonClusters, thresholdForProximity, "Photon");
+ initPotentialLinks_MipMisc(mipsNew, seedPhotonClusters, thresholdForProximity, "Photon");
initPotentialLinks_MipMisc(mipsOld, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
initPotentialLinks_MipMisc(mipsNew, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
@@ -809,7 +449,7 @@
initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
- initPotentialLinks_Cone(seeds, linkableClustersExcludingPhotons, allHits, tracksMatchedToClusters, clustersMatchedToTweakedTracks, 0.95, 0.9);
+ initPotentialLinks_Cone(seeds, linkableClustersExcludingPhotons, allHits, tracksMatchedToClusters, clustersMatchedToTracks, 0.95, 0.9);
}
// Done making links. Prep & build skeletons:
@@ -858,45 +498,39 @@
List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
// Small clusters
- MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
- proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, maxDistanceForSmallClusters));
- if (excludePhotonsFromCone) {
- proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), modifiedPhotonClusters));
- } else {
- proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
+ {
+ MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
+ proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, maxDistanceForSmallClusters));
+ proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), photons));
+ SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAndConeAlg);
+ sharedSmallDTreeClusters.createShares(linkableClusters);
+ sharedSmallDTreeClusters.rebuildHints();
+ allSharedClusters.add(sharedSmallDTreeClusters);
}
- SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAndConeAlg);
- sharedSmallDTreeClusters.createShares(linkableClusters);
- sharedSmallDTreeClusters.rebuildHints();
- allSharedClusters.add(sharedSmallDTreeClusters);
// ECAL halo
- DTreeClusterSharingAlgorithm dTreeSharingAlgECAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForDTreeClustersECAL);
- SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, dTreeSharingAlgECAL);
- sharedLeftoverHitClustersECAL.createShares(linkableClusters);
- sharedLeftoverHitClustersECAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersECAL);
+ {
+ DTreeClusterSharingAlgorithm dTreeSharingAlgECAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForDTreeClustersECAL);
+ SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, dTreeSharingAlgECAL);
+ sharedLeftoverHitClustersECAL.createShares(linkableClusters);
+ sharedLeftoverHitClustersECAL.rebuildHints();
+ allSharedClusters.add(sharedLeftoverHitClustersECAL);
+ }
// HCAL halo
- DTreeClusterSharingAlgorithm dTreeSharingAlgHCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, maxDistanceForDTreeClustersHCAL);
- SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, dTreeSharingAlgHCAL);
- sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
- sharedLeftoverHitClustersHCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+ {
+ DTreeClusterSharingAlgorithm dTreeSharingAlgHCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, maxDistanceForDTreeClustersHCAL);
+ SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, dTreeSharingAlgHCAL);
+ sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
+ sharedLeftoverHitClustersHCAL.rebuildHints();
+ allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+ }
// Muon hit sharing...
- if (m_useMucalBarrel || m_useMucalEndcap) {
+ {
DTreeClusterSharingAlgorithm dTreeSharingAlgMCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, maxDistanceForDTreeClustersMCAL);
SharedClusterGroup sharedLeftoverHitClustersMCAL = new SharedClusterGroup(leftoverHitClustersToShareMCAL, dTreeSharingAlgMCAL);
sharedLeftoverHitClustersMCAL.createShares(linkableClusters);
sharedLeftoverHitClustersMCAL.rebuildHints();
allSharedClusters.add(sharedLeftoverHitClustersMCAL);
}
- // Forward hit sharing...
- if (m_useFcal) {
- DTreeClusterSharingAlgorithm dTreeSharingAlgFCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, maxDistanceForDTreeClustersFCAL);
- SharedClusterGroup sharedLeftoverHitClustersFCAL = new SharedClusterGroup(leftoverHitClustersToShareFCAL, dTreeSharingAlgFCAL);
- sharedLeftoverHitClustersFCAL.createShares(linkableClusters);
- sharedLeftoverHitClustersFCAL.rebuildHints();
- allSharedClusters.add(sharedLeftoverHitClustersFCAL);
- }
// Iterate to build clusters:
for (int iIter=0; iIter<10; iIter++) {
@@ -905,7 +539,7 @@
newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
Map<Track, Set<Cluster>> mapTrackToVetoedAdditionsDueToTrackSeed = new HashMap<Track, Set<Cluster>>();
- clusteringIteration(tracksSortedByMomentum, tweakedTracksMatchedToClusters, electronTracks, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, allSharedClusters, newMapTrackToThreshold, newMapTrackToTolerance);
+ clusteringIteration(tracksSortedByMomentum, tracksMatchedToClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, allSharedClusters, newMapTrackToThreshold, newMapTrackToTolerance);
// Check on E/p, update scoring, make over-rides.
@@ -914,7 +548,7 @@
Set<Track> tracksWithVetoedLinkToUnusedCluster = new HashSet<Track>();
List<Set<Track>> jetLinks = new Vector<Set<Track>>();
- updateScoring(uniquelyMatchedTracks, ambiguouslyMatchedTracks, tracksSortedByMomentum, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, tracksWithEoverPTooLow, tracksWithEoverPMuchTooLow, tracksWithVetoedLinkToUnusedCluster, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks, linkableClusters);
+ updateScoring(tracksSortedByMomentum, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, tracksWithEoverPTooLow, tracksWithEoverPMuchTooLow, tracksWithVetoedLinkToUnusedCluster, newMapTrackToVetoedAdditions, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks, linkableClusters);
lookForVetoedLinks(tracksSortedByMomentum, tracksWithEoverPMuchTooLow, newMapTrackToShowerComponents, newMapShowerComponentToTrack, mapTrackToVetoedAdditionsDueToTrackSeed, jetLinks);
@@ -976,7 +610,7 @@
}
}
- applyOverrides(linkableClusters, photonLikePhotons, electronClusters, tracksMatchedToClusters, newMapShowerComponentToTrack, newMapTrackToShowerComponents, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, newMapTrackToVetoedAdditions, newMapTrackToTolerance, allSharedClusters);
+ applyOverrides(linkableClusters, tracksMatchedToClusters, newMapShowerComponentToTrack, newMapTrackToShowerComponents, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, newMapTrackToVetoedAdditions, newMapTrackToTolerance, allSharedClusters);
// Book-keeping for cone-adding
boolean m_usePhotonsForConeTrackFix = false;
@@ -985,7 +619,7 @@
Track matchedTrack = newMapShowerComponentToTrack.get(clus);
Set<Track> matchedJet = newMapShowerComponentToJet.get(clus);
if (matchedTrack == null && matchedJet == null) {
- if (modifiedPhotonClusters.contains(clus) && !m_usePhotonsForConeTrackFix) {
+ if (photons.contains(clus) && !m_usePhotonsForConeTrackFix) {
// This is a photon and we aren't using them for the cone -- ignore it
} else {
unassignedClusters.add(clus);
@@ -994,18 +628,19 @@
}
ReassignClustersAlgorithm algorithm = null;
+ String mapName;
+ if (m_useSteveMipsForConeScoring) {
+ mapName = "TrackMipMap";
+ } else {
+ mapName = "ShowerFinderMapTrackToMip";
+ }
+ Map<Track, BasicCluster> mapTrackToMIP = (Map<Track, BasicCluster>) (m_event.get(mapName));
+ MIPGeometryHandler geomHandler = new LayerBasedMIPGeometryHandler(mapTrackToMIP, m_findCluster);
+ //MIPGeometryHandler geomHandler = new HelixTangentMIPGeometryHandler(mapTrackToMIP, m_findCluster);
+
if (m_useSimpleConeForReassignment) {
algorithm = new ConeReassignmentAlgorithm(1.00, m_findCluster);
} else {
- String mapName;
- if (m_useSteveMipsForConeScoring) {
- mapName = "TrackMipMap";
- } else {
- mapName = "ShowerFinderMapTrackToMip";
- }
- Map<Track, BasicCluster> mapTrackToMIP = (Map<Track, BasicCluster>) (m_event.get(mapName));
- MIPGeometryHandler geomHandler = new LayerBasedMIPGeometryHandler(mapTrackToMIP, m_findCluster);
- //MIPGeometryHandler geomHandler = new HelixTangentMIPGeometryHandler(mapTrackToMIP, m_findCluster);
algorithm = new ConeMIPReassignmentAlgorithm(geomHandler, 800.0, Math.PI*0.5);
}
if (m_fixSingleTracksWithCone) {
@@ -1014,33 +649,38 @@
// Only process tracks that aren't part of a jet:
Set<Track> jetOfTrack = mapTrackToJet.get(tr);
if (jetOfTrack == null) {
- checkTrackForReassignments(tr, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, unassignedClusters, newMapTrackToTolerance.get(tr), algorithm, tweakedTracksMatchedToClusters);
+ checkTrackForReassignments(tr, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, unassignedClusters, newMapTrackToTolerance.get(tr), algorithm);
}
}
}
if (m_fixJetsWithCone) {
for (Set<Track> jet : jets) {
- checkJetForReassignments(jet, newMapJetToShowerComponents, newMapShowerComponentToJet, allSharedClusters, unassignedClusters, algorithm, newMapShowerComponentToTrack, tweakedTracksMatchedToClusters, newMapTrackToShowerComponents);
+ checkJetForReassignments(jet, newMapJetToShowerComponents, newMapShowerComponentToJet, allSharedClusters, unassignedClusters, algorithm, newMapTrackToShowerComponents);
}
}
if (m_debug) {
- printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
+ printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, photons, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
}
// Outputs
- makeParticlesAndWriteOut(trackList, tracksSortedByMomentum, unmatchedTracksThatDontReachCalorimeter, mapOrigTrackToTweakedTrack,
- tracksMatchedToClusters, clustersMatchedToTracks,
- electronTracks, modifiedPhotonClusters,electronClusters, photonLikePhotons, chargedHadronLikePhotons,
- seedHadronLikePhotonClusters, nonSeedHadronLikePhotonClusters, nonSeedPhotonLikePhotonClusters,
- newMapTrackToShowerComponents, newMapShowerComponentToTrack,
- linkableClusters,
- mips, clumps, treesWithNoStructure, seedLeftoverHitClusters,
- jets, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet,
- allSharedClusters,
- smallClustersToShare, leftoverHitClustersToShare,
[truncated at 1000 lines; 2167 more skipped]