lcsim/src/org/lcsim/contrib/uiowa
diff -u -r1.59 -r1.60
--- ReclusterDTreeDriver.java 21 Oct 2008 23:05:48 -0000 1.59
+++ ReclusterDTreeDriver.java 22 Oct 2008 17:45:26 -0000 1.60
@@ -35,7 +35,7 @@
* in this package, which uses the implementation in
* org.lcsim.recon.cluster.directedtree developed by NIU).
*
- * @version $Id: ReclusterDTreeDriver.java,v 1.59 2008/10/21 23:05:48 mcharles Exp $
+ * @version $Id: ReclusterDTreeDriver.java,v 1.60 2008/10/22 17:45:26 mcharles Exp $
* @author Mat Charles <[log in to unmask]>
*/
@@ -59,7 +59,6 @@
protected boolean m_oldMipFinderCrossesTrees = true;
protected boolean m_useNewMipFinder = true;
protected boolean m_useOldMipFinder = true;
- protected boolean m_removePoorQualityMips = false; // DANGER: STILL USED
protected boolean m_clusterAsJets = true;
protected boolean m_ignorePunchThroughTracksForJets = true;
protected boolean m_useTracksThatDontReachCalorimeter = true;
@@ -68,7 +67,6 @@
protected boolean m_allowElectrons = true;
protected boolean m_allowPhotonSeeds = true;
- protected boolean m_splitPhotonSeeds = true;
protected boolean m_allPhotonsAreValidSeeds = true;
protected boolean m_checkForPhotonTrackOverlap = false;
@@ -149,7 +147,6 @@
// Read in
List<Cluster> dTreeClusters = event.get(Cluster.class, m_dTreeClusterListName);
List<Track> trackList = event.get(Track.class, m_inputTrackList);
- System.out.println("DEBUG: "+this.getClass().getName()+" read in list of "+trackList.size()+" tracks named "+m_inputTrackList);
List<Cluster> photons = event.get(Cluster.class, "PhotonClustersForDTree");
List<Cluster> largeClusters = dTreeClusters; // FIXME: NOT IDEAL! Perhaps run MST on DTree clusters?
if (trackList == null) { throw new AssertionError("Null track list!"); }
@@ -306,145 +303,6 @@
}
}
- /*
-
- // Handle photons
- List<Cluster> chargedHadronLikePhotons = new Vector<Cluster>();
- List<Cluster> modifiedPhotonClusters = new Vector<Cluster>();
- List<Cluster> photonLikePhotons = new Vector<Cluster>();
- List<Cluster> electronClusters = new Vector<Cluster>();
- List<Track> electronTracks = new Vector<Track>();
- photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, trackList, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
-
- // Resume track matching
- List<Cluster> allMatchableClusters = new Vector<Cluster>();
- allMatchableClusters.addAll(mips);
- allMatchableClusters.addAll(clumps);
- if (m_allowPhotonSeeds) {
- allMatchableClusters.addAll(chargedHadronLikePhotons);
- }
- allMatchableClusters.addAll(leftoverHitClusters); // DANGER -- leftoverHitClusters can be large!
- allMatchableClusters.addAll(treesWithNoStructure);
- if (allMatchableClusters.contains(null)) { throw new AssertionError("Book-keeping error: null cluster in list allMatchableClusters"); }
-
- if (m_debug) { System.out.println("Attempting to match "+allMatchableClusters.size()+" matchable clusters to "+trackList.size()+" tracks"); }
- for (Track tr : trackList) {
- if (electronTracks.contains(tr)) {
- continue; // Those are already assigned!
- }
- Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, allMatchableClusters);
- if (matchedCluster != null) {
- if (leftoverHitClusters.contains(matchedCluster)) {
- if (m_debugSeedSplitting) {
- // Debug printout
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Hep3Vector interceptPoint = null;
- if (result != null) {
- interceptPoint = result.getInterceptPoint();
- }
- if (interceptPoint != null) {
- double primaryDist = proximity(matchedCluster, interceptPoint);
- int innermostLayerOfMatchedCluster = 99;
- for (CalorimeterHit hit : matchedCluster.getCalorimeterHits()) {
- int layer = getLayer(hit);
- if (layer < innermostLayerOfMatchedCluster) { innermostLayerOfMatchedCluster = layer; }
- }
- System.out.println("DEBUG: Matched track with p="+momentum(tr).magnitude()+" to leftoverHitCluster with "+matchedCluster.getCalorimeterHits().size()+" hits at proximity="+primaryDist+" with innermost layer "+innermostLayerOfMatchedCluster);
- }
- Map<MCParticle, List<SimCalorimeterHit>> truthInfo = truthFromCluster(matchedCluster);
- for (MCParticle part : truthInfo.keySet()) {
- List<SimCalorimeterHit> truthHits = truthInfo.get(part);
- System.out.println("DEBUG: -> contribution of "+truthHits.size()+" hits from particle with p="+part.getMomentum().magnitude());
- }
- }
- Cluster tree = treeOfSharedCluster.get(matchedCluster);
- if (tree != null) {
- List<Cluster> targets = targetsInTree.get(tree);
- if (m_debugSeedSplitting) {
- // Debug printout
- for (Cluster target : targets) {
- String idname = new String("UNKNOWN");
- if (mipsOld.contains(target)) {
- idname = new String("MipOld");
- } else if (mipsNew.contains(target)) {
- idname = new String("MipNew");
- } else if (clumps.contains(target)) {
- idname = new String("Clump");
- }
- MCParticle targetTruth = quoteDominantParticle(target);
- double targetTruthMom = targetTruth.getMomentum().magnitude();
- System.out.println("DEBUG: -> structure found: "+idname+" with "+target.getCalorimeterHits().size()+" hits from particle with p="+targetTruthMom);
- }
- }
- if (targets.size()>0) {
- Cluster rematchedSubCluster = m_trackClusterMatcher.matchTrackToCluster(tr, targets);
- if (rematchedSubCluster != null) {
- matchedCluster = rematchedSubCluster; // CHANGES BEHAVIOUR!
- if (m_debugSeedSplitting) {
- // Debug printout
- String idname = new String("UNKNOWN");
- if (mipsOld.contains(rematchedSubCluster)) {
- idname = new String("MipOld");
- } else if (mipsNew.contains(rematchedSubCluster)) {
- idname = new String("MipNew");
- } else if (clumps.contains(rematchedSubCluster)) {
- idname = new String("Clump");
- }
- MCParticle targetTruth = quoteDominantParticle(rematchedSubCluster);
- double targetTruthMom = targetTruth.getMomentum().magnitude();
- if (rematchedSubCluster != null) {
- System.out.println("DEBUG: -> Rematch to: "+idname+" with "+rematchedSubCluster.getCalorimeterHits().size()+" hits from particle with p="+targetTruthMom);
- }
- }
- } else {
- if (m_debugSeedSplitting) {
- // Debug printout
- System.out.println("DEBUG: -> Rematch failed (no targets passed track-matching)");
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Hep3Vector interceptPoint = null;
- if (result != null) {
- interceptPoint = result.getInterceptPoint();
- }
- if (interceptPoint != null) {
- for (Cluster target : targets) {
- double dist = proximity(target, interceptPoint);
- int innermostLayerOfTarget = 99;
- for (CalorimeterHit hit : target.getCalorimeterHits()) {
- int layer = getLayer(hit);
- if (layer < innermostLayerOfTarget) { innermostLayerOfTarget = layer; }
- }
- System.out.println("DEBUG: * Target with "+target.getCalorimeterHits().size()+" hits at dist="+dist+" with innermost layer="+innermostLayerOfTarget);
- }
- }
- }
- }
- }
- }
- } else if (treesWithNoStructure.contains(matchedCluster)) {
- if (m_debugSeedSplitting) {
- System.out.println("DEBUG: Matched track with p="+momentum(tr).magnitude()+" to block with "+matchedCluster.getCalorimeterHits().size()+" hits.");
- }
- }
- tracksMatchedToClusters.put(tr, matchedCluster);
- List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
- if (clusTrList == null) {
- clusTrList = new Vector<Track>();
- clustersMatchedToTracks.put(matchedCluster, clusTrList);
- }
- clusTrList.add(tr);
- } else {
- System.out.println("DEBUG: Failed to match track with p="+momentum(tr).magnitude());
- }
- }
-
- // Optionally, split photon seeds
- List<Cluster> photonFragments = new Vector<Cluster>();
- if (m_splitPhotonSeeds) {
- splitPhotonSeeds(clustersMatchedToTracks, tracksMatchedToClusters, modifiedPhotonClusters, electronClusters, photonLikePhotons, chargedHadronLikePhotons, photonFragments, mipsOld, mipsNew, mips, clumps, treesWithNoStructure);
- }
-
- */
-
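The block deleted above leans on helpers that are defined elsewhere in the driver and not shown here: m_findCluster.performExtrapolation(track) (whose HelixExtrapolationResult.getInterceptPoint() gives the extrapolated calorimeter entry point), proximity(cluster, point), and truthFromCluster(cluster). Minimal sketches of the latter two, inferred from how the deleted code uses them rather than from the driver's actual implementations:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import hep.physics.vec.Hep3Vector;
import org.lcsim.event.CalorimeterHit;
import org.lcsim.event.Cluster;
import org.lcsim.event.MCParticle;
import org.lcsim.event.SimCalorimeterHit;

class ReclusterHelperSketches {
    // proximity: smallest distance from the track's intercept point to
    // any hit in the cluster (illustrative reconstruction only).
    static double proximity(Cluster cluster, Hep3Vector intercept) {
        double best = Double.MAX_VALUE;
        for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
            double[] pos = hit.getPosition(); // global (x, y, z), mm
            double dx = pos[0] - intercept.x();
            double dy = pos[1] - intercept.y();
            double dz = pos[2] - intercept.z();
            double dist = Math.sqrt(dx * dx + dy * dy + dz * dz);
            if (dist < best) { best = dist; }
        }
        return best;
    }

    // truthFromCluster: map each contributing MCParticle to the sim hits
    // it deposited in the cluster, matching the deleted debug printouts.
    static Map<MCParticle, List<SimCalorimeterHit>> truthFromCluster(Cluster cluster) {
        Map<MCParticle, List<SimCalorimeterHit>> truth = new HashMap<MCParticle, List<SimCalorimeterHit>>();
        for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
            if (!(hit instanceof SimCalorimeterHit)) { continue; }
            SimCalorimeterHit simHit = (SimCalorimeterHit) hit;
            for (int i = 0; i < simHit.getMCParticleCount(); i++) {
                MCParticle part = simHit.getMCParticle(i);
                List<SimCalorimeterHit> hits = truth.get(part);
                if (hits == null) { hits = new Vector<SimCalorimeterHit>(); truth.put(part, hits); }
                hits.add(simHit);
            }
        }
        return truth;
    }
}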
// Unmatched tracks
List<Track> unmatchedTracks = new Vector<Track>();
unmatchedTracks.addAll(trackList);
@@ -471,174 +329,6 @@
}
}
- /*
-
- // Check for cases where seed cluster is just plain too big
- List<Track> flaggedBadTracks = new Vector<Track>();
- for (Track tr : tweakedTracks) {
- if (electronTracks.contains(tr)) {
- continue; // Electron clusters are not too big by construction (E/p ~ 1)
- }
- Cluster seed = tweakedTracksMatchedToClusters.get(tr);
- double seedEnergy = energy(seed);
- double tolerance = 3.0;
- boolean testValidEoverP = testEoverP_oneSided(seedEnergy, tr, tolerance);
- if (!testValidEoverP) {
- // More than 3sigma off -- broken.
- flaggedBadTracks.add(tr);
- }
- }
- for (Track tr : flaggedBadTracks) {
- // Track's seed has E>>p -- split it up.
- // First find the seed using the parent track...
- Cluster seed = tweakedTracksMatchedToClusters.get(tr);
-
- // We'll be redoing the matching, so if we have several tracks
- // bundled into one, we should split them up:
- List<Track> individualTracks = new Vector<Track>();
- if (tr instanceof MultipleTrackTrack) {
- individualTracks.addAll(tr.getTracks());
- } else {
- individualTracks.add(tr);
- }
-
- // Split the seed up
- Map<Track,Cluster> splitOutputMap = new HashMap<Track,Cluster>();
- List<Cluster> splitClusters = splitPhoton(seed, individualTracks, splitOutputMap, 10.0, false);
- if (m_debugSeedSplitting) {
- System.out.println("DEBUG: Split big cluster with "+seed.getCalorimeterHits().size()+" hits into pieces...");
- for (Track tmpTr : splitOutputMap.keySet()) {
- double p = (new BasicHep3Vector(tmpTr.getMomentum())).magnitude();
- Cluster clus = splitOutputMap.get(tmpTr);
- System.out.println(" * Track with p="+p+" mapped to piece with "+clus.getCalorimeterHits().size()+" hits.");
- }
- }
-
- // Remove old listings / book-keeping info
- mipsOld.remove(seed);
- mipsNew.remove(seed);
- mips.remove(seed);
- clumps.remove(seed);
- leftoverHitClusters.remove(seed);
- treesWithNoStructure.remove(seed);
- clustersMatchedToTracks.remove(seed);
- tweakedTracksMatchedToClusters.remove(tr);
- clustersMatchedToTweakedTracks.remove(seed);
- tweakedTracks.remove(tr);
- for (Track dauTr : individualTracks) {
- tracksMatchedToClusters.remove(dauTr);
- uniquelyMatchedTracks.remove(dauTr);
- ambiguouslyMatchedTracks.remove(dauTr);
- mapOrigTrackToTweakedTrack.remove(dauTr);
- }
- List<Cluster> treesContainingOldSeed = new Vector<Cluster>();
- for (Cluster tree : targetsInTree.keySet()) {
- List<Cluster> targets = targetsInTree.get(tree);
- if (targets.contains(seed)) {
- treesContainingOldSeed.add(tree);
- targets.remove(seed);
- }
- }
-
- // Some tracks in the bundle may not have a seed cluster any more. They will be jettisoned.
- // This is rather brutal -- really we should refine the matching somehow (iteratively?)
- // For the rest, see if they're now ambiguous or unique in the matching to newly broken-up seeds
- Map<Cluster, List<Track>> local_clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
- Map<Track,Cluster> local_tracksMatchedToClusters = new HashMap<Track,Cluster>();
- Set<Track> local_uniquelyMatchedTracks = new HashSet<Track>();
- Set<Track> local_ambiguouslyMatchedTracks = new HashSet<Track>();
- Map<Track, Cluster> local_tweakedTracksMatchedToClusters = new HashMap<Track, Cluster>();
- Map<Cluster, Track> local_clustersMatchedToTweakedTracks = new HashMap<Cluster, Track>();
- List<Track> local_tweakedTracks = new Vector<Track>();
- Map<Track, Track> local_mapOrigTrackToTweakedTrack = new HashMap<Track, Track>();
- for (Track dauTr : individualTracks) {
- Cluster newSeed = splitOutputMap.get(dauTr);
- if (newSeed != null) {
- List<Track> tracksMatchedToSeed = local_clustersMatchedToTracks.get(newSeed);
- if (tracksMatchedToSeed == null) { tracksMatchedToSeed = new Vector<Track>(); local_clustersMatchedToTracks.put(newSeed, tracksMatchedToSeed); }
- tracksMatchedToSeed.add(dauTr);
- local_tracksMatchedToClusters.put(dauTr, newSeed);
- }
- }
- handleTrackMatchingAmbiguities(local_clustersMatchedToTracks, local_tracksMatchedToClusters, local_uniquelyMatchedTracks, local_ambiguouslyMatchedTracks, local_tweakedTracksMatchedToClusters, local_clustersMatchedToTweakedTracks, local_tweakedTracks, local_mapOrigTrackToTweakedTrack);
-
- // Book-keeping
- Set<Cluster> newSeeds = local_clustersMatchedToTracks.keySet();
- Set<Cluster> splitOutputMapSeeds = new HashSet<Cluster>();
- splitOutputMapSeeds.addAll(splitOutputMap.values());
-
- // Any piece not matched to a track becomes a photon
- for (Cluster piece : splitClusters) {
- if (!newSeeds.contains(piece)) {
- chargedHadronLikePhotons.add(piece);
- modifiedPhotonClusters.add(piece);
- }
- }
-
- // Any track not matched to anything is junked
- for (Track subTr : individualTracks) {
- if (local_tracksMatchedToClusters.get(subTr) == null) {
- if (m_debugSeedSplitting) { System.out.println("DEBUG: After splitting cluster, couldn't find a match for track -- junking"); }
- unmatchedTracks.add(subTr);
- }
- }
-
- // Now, for each track matched to a seed, re-test E/p. If it passes, keep it.
- for (Track newTweakedTrack : local_tweakedTracks) {
- Cluster newSeed = local_tweakedTracksMatchedToClusters.get(newTweakedTrack);
- if (newSeed == null) { throw new AssertionError("Seed is null, but it shouldn't be -- book-keeping error"); }
- if (!newSeeds.contains(newSeed)) { throw new AssertionError("New seed is not in newSeeds!"); }
- if (unmatchedTracks.contains(newTweakedTrack)) { throw new AssertionError("Book-keeping error!"); }
- double newSeedEnergy = energy(newSeed);
- double tolerance = 3.0;
- boolean testValidEoverP_new = testEoverP_oneSided(newSeedEnergy, newTweakedTrack, tolerance);
-
- if (testValidEoverP_new) {
- // REPLACE -- we managed to break off a piece of acceptable size.
- if (m_debugSeedSplitting) { System.out.println("DEBUG: Will replace old seed of "+seed.getCalorimeterHits().size()+" hits with new seed of "+newSeed.getCalorimeterHits().size()+" hits plus photon pieces"); }
- // Check if this is one track or several bundled together:
- List<Track> individualTracksOfNewTweakedTrack = new Vector<Track>();
- if (newTweakedTrack instanceof MultipleTrackTrack) {
- individualTracksOfNewTweakedTrack.addAll(newTweakedTrack.getTracks());
- ambiguouslyMatchedTracks.addAll(newTweakedTrack.getTracks());
- for (Track subTr : newTweakedTrack.getTracks()) {
- mapOrigTrackToTweakedTrack.put(subTr, newTweakedTrack);
- }
- } else {
- uniquelyMatchedTracks.add(newTweakedTrack);
- individualTracksOfNewTweakedTrack.add(newTweakedTrack);
- mapOrigTrackToTweakedTrack.put(newTweakedTrack, newTweakedTrack);
- }
- // Add new listings
- clumps.add(newSeed);
- clustersMatchedToTracks.put(newSeed, individualTracksOfNewTweakedTrack);
- for (Track subTr : individualTracksOfNewTweakedTrack) {
- tracksMatchedToClusters.put(subTr, newSeed);
- }
- tweakedTracks.add(newTweakedTrack);
- tweakedTracksMatchedToClusters.put(newTweakedTrack, newSeed);
- clustersMatchedToTweakedTracks.put(newSeed, newTweakedTrack);
- for (Cluster tree : treesContainingOldSeed) {
- List<Cluster> targets = targetsInTree.get(tree);
- targets.add(newSeed);
- }
- } else {
- // FAIL ENTIRELY
- if (m_debugSeedSplitting) {
- System.out.println("DEBUG: Unable to replace old seed of "+seed.getCalorimeterHits().size()+" hits with smaller seed. Will turn into photon and remove track from consideration.");
- System.out.println("DEBUG: Was working with "+local_tweakedTracks.size()+" local tweaked tracks from "+individualTracks.size()+" individual tracks");
- if (clustersMatchedToTracks.get(seed) != null) { System.out.println("OLD SEED MATCHED TO NON-NULL TRACK"); }
- }
- // Add new listings as a photon
- chargedHadronLikePhotons.add(newSeed);
- modifiedPhotonClusters.add(newSeed);
- unmatchedTracks.add(newTweakedTrack);
- }
- }
- }
-
- */
-
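Both the flagging pass ("seed cluster is just plain too big") and the post-split re-test in the block deleted above call testEoverP_oneSided(energy, track, tolerance), which this diff does not show. Its shape follows from the usage: with tolerance = 3.0, a seed fails only when its energy sits more than three standard deviations above the track momentum, while E well below p is left alone. A sketch under that reading -- the resolution term sigmaE = 0.5*sqrt(p) is a placeholder assumption for illustration, not the driver's actual parameterisation:

import org.lcsim.event.Track;

class EOverPSketch {
    // One-sided E/p compatibility: returns false only when the cluster
    // energy exceeds the track momentum by more than tolerance sigma.
    static boolean testEoverP_oneSided(double clusterEnergy, Track track, double tolerance) {
        double[] p3 = track.getMomentum();
        double p = Math.sqrt(p3[0] * p3[0] + p3[1] * p3[1] + p3[2] * p3[2]);
        double sigmaE = 0.5 * Math.sqrt(Math.max(p, 1e-6)); // assumed hadronic resolution
        return clusterEnergy <= p + tolerance * sigmaE;
    }
}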
// Track seeds
List<Cluster> seedLeftoverHitClusters = new Vector();
List<Cluster> nonSeedLeftoverHitClusters = new Vector();
@@ -661,611 +351,6 @@
}
}
- /*
-
- if (m_checkForPhotonTrackOverlap) {
- System.out.print("DEBUG: BEFORE: ");
- countHits(mipsOld, "mipsOld");
- countHits(mipsNew, "mipsNew");
- countHits(mips, "mips");
- countHits(clumps, "clumps");
- countHits(leftoverHitClusters, "leftoverHitClusters");
- countHits(modifiedPhotonClusters, "modifiedPhotonClusters");
- System.out.println("");
- printTracks(tweakedTracksMatchedToClusters);
- Set<Cluster> clustersFlaggedAsPhotonOverlaps = new HashSet<Cluster>();
- // Find which clumps/blocks are in the ECAL:
- Set<Cluster> clumpsAndBlocks = new HashSet<Cluster>();
- clumpsAndBlocks.addAll(clumps);
- clumpsAndBlocks.addAll(treesWithNoStructure);
- List<Cluster> clumpsAndBlocksEcal = new Vector<Cluster>();
- for (Cluster clus : clumpsAndBlocks) {
- HitInECALDecision dec = new HitInECALDecision();
- ListFilter filter = new ListFilter(dec);
- List<CalorimeterHit> hitsEcal = filter.filterList(clus.getCalorimeterHits());
- if (hitsEcal.size() == clus.getCalorimeterHits().size()) {
- // 100% of hits in ECAL
- clumpsAndBlocksEcal.add(clus);
- } else if (hitsEcal.size() != 0) {
- // Error: Some hits in ECAL but not all
- throw new AssertionError("Book-keeping error: "+hitsEcal.size()+" / "+clus.getCalorimeterHits().size()+" hits of cluster in ECAL -- should be all or none");
- }
- }
- // Check the ECAL clumps/blocks:
- for (Cluster clump : clumpsAndBlocksEcal) {
- int size = clump.getCalorimeterHits().size();
- Set<SimCalorimeterHit> chargedHits = new HashSet<SimCalorimeterHit>();
- Set<SimCalorimeterHit> neutralHits = new HashSet<SimCalorimeterHit>();
- Set<SimCalorimeterHit> photonHits = new HashSet<SimCalorimeterHit>();
- Map<MCParticle, List<SimCalorimeterHit>> truth = truthFromCluster(clump);
- for (MCParticle part : truth.keySet()) {
- if (Math.abs(part.getCharge()) > 0.5) {
- // Charged
- chargedHits.addAll(truth.get(part));
- } else if (part.getPDGID()==22) {
- // Photon
- photonHits.addAll(truth.get(part));
- } else {
- // Other neutral
- neutralHits.addAll(truth.get(part));
- }
- }
- Map<Integer, Set<CalorimeterHit>> mapHitsByLayer = new HashMap<Integer, Set<CalorimeterHit>>();
- for (CalorimeterHit hit : clump.getCalorimeterHits()) {
- int layer = getLayer(hit);
- Set<CalorimeterHit> hits = mapHitsByLayer.get(layer);
- if (hits == null) { hits = new HashSet<CalorimeterHit>(); mapHitsByLayer.put(layer, hits); }
- hits.add(hit);
- }
- int firstLayer = 999;
- int firstLayerWithThreeHits = 999;
- int lastLayer = -1;
- int lastLayerWithThreeHits = -1;
- boolean foundLayerWithThreeHits = false;
- for (Integer layer : mapHitsByLayer.keySet()) {
- Set<CalorimeterHit> hits = mapHitsByLayer.get(layer);
- if (hits != null && hits.size() > 0) {
- if (layer < firstLayer) { firstLayer = layer; }
- if (layer > lastLayer) { lastLayer = layer; }
- if (layer < firstLayerWithThreeHits && hits.size()>=3) { firstLayerWithThreeHits = layer; foundLayerWithThreeHits = true; }
- if (layer > lastLayerWithThreeHits && hits.size()>=3) { lastLayerWithThreeHits = layer; foundLayerWithThreeHits = true; }
- }
- }
-
- boolean flaggedAsOverlap = (firstLayerWithThreeHits < 7 && firstLayer > 0 && size > 50);
- if (flaggedAsOverlap) {
- clustersFlaggedAsPhotonOverlaps.add(clump);
- }
-
- boolean correctToFlag = (photonHits.size() > 15 && chargedHits.size() > 5);
- System.out.print("DEBUG: Flagged ");
- if (clumps.contains(clump)) {
- System.out.print("clump");
- } else {
- System.out.print("block");
- }
- System.out.print(" with "+size+" hits ");
- if (correctToFlag) {
- if (flaggedAsOverlap) {
- System.out.print("correctly as overlap");
- } else {
- System.out.print("incorrectly as non-overlap");
- }
- } else {
- if (flaggedAsOverlap) {
- System.out.print("incorrectly as overlap");
- } else {
- System.out.print("correctly as non-overlap");
- }
- }
- System.out.print(" -- cluster has "+chargedHits.size()+" charged hits + "+photonHits.size()+" photon hits + "+neutralHits.size()+" NH hits. ");
- System.out.print("Layers: "+firstLayer+"-"+lastLayer);
- if (foundLayerWithThreeHits) {
- System.out.println(" (or "+firstLayerWithThreeHits+"-"+lastLayerWithThreeHits+" with 3+ hits)");
- } else {
- System.out.println(" (no layer with 3+ hits)");
- }
- }
-
- for (Cluster clus : clustersFlaggedAsPhotonOverlaps) {
- System.out.print("DEBUG: Studying flagged ");
- if (clumps.contains(clus)) {
- System.out.print("clump");
- } else {
- System.out.print("block");
- }
- System.out.println(" with "+clus.getCalorimeterHits().size()+" hits...");
- Map<MCParticle, List<SimCalorimeterHit>> truth = truthFromCluster(clus);
- for (MCParticle part : truth.keySet()) {
- System.out.println("DEBUG: * Truth "+part.getPDGID()+" with p="+part.getMomentum().magnitude()+" contributed "+truth.get(part).size()+" hits");
- }
- // Find layer range spanned
- int firstLayer = 999;
- int lastLayer = -1;
- for (CalorimeterHit hit : clus.getCalorimeterHits()) {
- int layer = getLayer(hit);
- if (layer < firstLayer) { firstLayer = layer; }
- if (layer > lastLayer) { lastLayer = layer; }
- }
- if (firstLayer == 999 || lastLayer < 0) { throw new AssertionError("Book-keeping error"); }
- // Find hits
- Set<Long> clusterHitIDs = new HashSet<Long>();
- for (CalorimeterHit hit : clus.getCalorimeterHits()) {
- Long id = new Long(hit.getCellID());
- clusterHitIDs.add(id);
- }
- // Look for tracks which
- // 1) Extrapolate into the cluster
- // 2) Exit the cluster
- // 3) Have MIP-like hits after they exit the cluster
- List<Track> overlaidMipTracks = new Vector<Track>();
- Map<Track, List<CalorimeterHit>> matchedMipSegments = new HashMap<Track, List<CalorimeterHit>>();
- for (Track tr : trackList) {
- Set<Long> trackCellsFoundInCluster = new HashSet<Long>();
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Hep3Vector interceptPoint = null;
- if (result != null) { interceptPoint = result.getInterceptPoint(); }
- if (interceptPoint != null) {
- // Scan within cluster
- for (int iLayer=firstLayer; iLayer<=lastLayer; iLayer++) {
- Long cellID = result.extendToECALLayerAndFindCell(iLayer);
- if (cellID != null && clusterHitIDs.contains(cellID)) {
- trackCellsFoundInCluster.add(cellID);
- }
- }
- }
- System.out.println("DEBUG: Tested track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" and connected with "+trackCellsFoundInCluster.size()+" cells in flagged clump of "+clus.getCalorimeterHits().size()+" hits.");
- Cluster currentSeed = tracksMatchedToClusters.get(tr);
- if (currentSeed != null) {
- System.out.print("DEBUG: Seed has "+currentSeed.getCalorimeterHits().size()+" and is a");
- if (mips.contains(currentSeed)) { System.out.print(" MIP"); }
- if (clumps.contains(currentSeed)) { System.out.print(" clump"); }
- if (treesWithNoStructure.contains(currentSeed)) { System.out.print(" block"); }
- if (seedLeftoverHitClusters.contains(currentSeed)) { System.out.print(" small seed"); }
- if (modifiedPhotonClusters.contains(currentSeed)) { System.out.print(" photon"); }
- System.out.println(".");
- }
-
-
- if (trackCellsFoundInCluster.size()>0) {
- // Try following track further...
- List<Long> trajectory = new Vector<Long>(); // Ordered list of cell extrapolations
- int numLayersEcal = 31; // FIXME: Don't hard-code
- int numLayersHcal = 40; // FIXME: Don't hard-code
- for (int iLayer=0; iLayer<numLayersEcal; iLayer++) {
- Long cellID = result.extendToECALLayerAndFindCell(iLayer);
- if (cellID != null) {
- trajectory.add(new Long(cellID));
- }
- }
- // FIXME: Consider endcap/barrel overlap here.
- for (int iLayer=0; iLayer<numLayersHcal; iLayer++) {
- Long cellID = result.extendToHCALLayerAndFindCell(iLayer);
- if (cellID != null) {
- trajectory.add(new Long(cellID));
- }
- }
-
- // Step through layers.
- List<Boolean> foundHit = new Vector<Boolean>();
- List<Boolean> foundMipHit = new Vector<Boolean>();
- int maxNeighboursForMip_011 = 1;
-
- boolean enteredClump = false;
- boolean exitedClump = false;
- boolean currentlyInClump = false;
- int lastPseudoLayerHit = -1;
- int lastClumpPseudoLayer = -1;
- int countPseudoLayersHitAfterLastExit = 0;
- int lastExitPseudoLayer = -1;
-
- for (int iLayer=0; iLayer<trajectory.size(); iLayer++) {
- Long cellID = trajectory.get(iLayer);
- CalorimeterHit hit = allHitsMap.get(cellID);
- if (hit == null) {
- // Didn't find a hit (and therefore no MIP hit)
- foundHit.add(new Boolean(false));
- foundMipHit.add(new Boolean(false));
- } else {
- // Found a hit
- foundHit.add(new Boolean(true));
- lastPseudoLayerHit = iLayer;
- org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
- id.setID(cellID);
- long[] neighbourIDs_011 = id.getNeighbourIDs(0, 1, 1);
- long[] neighbourIDs_022 = id.getNeighbourIDs(0, 2, 2);
- Set<CalorimeterHit> neighbourHits_011 = new HashSet<CalorimeterHit>();
- Set<CalorimeterHit> neighbourHits_022 = new HashSet<CalorimeterHit>();
- for (long neighbourID : neighbourIDs_011) {
- CalorimeterHit neighbourHit = allHitsMap.get(neighbourID);
- if (neighbourHit != null) {
- neighbourHits_011.add(neighbourHit);
- }
- }
- for (long neighbourID : neighbourIDs_022) {
- CalorimeterHit neighbourHit = allHitsMap.get(neighbourID);
- if (neighbourHit != null) {
- neighbourHits_022.add(neighbourHit);
- }
- }
- boolean mipHit = (neighbourHits_011.size() <= maxNeighboursForMip_011);
- foundMipHit.add(new Boolean(mipHit));
- System.out.print("DEBUG: * Track extrapolated to hit in layer "+getLayer(hit)+" of "+hit.getSubdetector().getName());
- System.out.print(". Neighbour hits in 3x3="+neighbourHits_011.size()+", in 5x5="+neighbourHits_022.size());
- if (mipHit) {
- System.out.print(" => MIP.");
- } else {
- System.out.print(" => non-MIP.");
- }
- if (clus.getCalorimeterHits().contains(hit)) {
- System.out.print(" -- in clump!");
- enteredClump = true;
- currentlyInClump = true;
- lastClumpPseudoLayer = iLayer;
- } else {
- // Not in clump
- if (enteredClump) {
- exitedClump = true;
- }
- if (currentlyInClump) {
- // We just exited.
- lastExitPseudoLayer = iLayer;
- currentlyInClump = false;
- countPseudoLayersHitAfterLastExit = 1;
- } else {
- countPseudoLayersHitAfterLastExit++;
- }
- }
- System.out.println(" ");
- }
- }
-
- // Sanity checks
- if (foundMipHit.size() != trajectory.size()) { throw new AssertionError("Book-keeping error"); }
- if (foundHit.size() != trajectory.size()) { throw new AssertionError("Book-keeping error"); }
-
- // OK. Now, check if this is consistent with a charged track punching
- // through a photon/neutral hadron.
- //
- // First, require at least m hits in the first n pseudolayers after
- // exiting the clump to make sure that we really are extrapolating
- // the right track.
- int checkLayersAfterExitForContinuity = 7;
- int minHitsInLayersAfterExitForContinuity = 5;
- // Then, require a sequence of MIP hits to make sure that we really
- // are following a MIP and not just part of the charged shower.
- // We are allowed to skip a couple of layers immediately after exit
- // since these may have remnants of the photon/hadron cluster.
- int numLayersAllowedToSkipAfterExitForMipCheck = 2;
- int numLayersToTestForMipCheck = 6;
- int minMipHitsForMipCheck = 5;
-
- // Check for continuity
- boolean passesContinuityCheck = false;
- if (lastExitPseudoLayer >= 0) {
- int hitsFoundForContinuityCheck = 0;
- for (int iLayer=0; iLayer<checkLayersAfterExitForContinuity; iLayer++) {
- int pseudoLayer = iLayer + lastClumpPseudoLayer + 1;
- if (pseudoLayer<foundHit.size() && foundHit.get(pseudoLayer)) {
- hitsFoundForContinuityCheck++;
- }
- }
- if (hitsFoundForContinuityCheck >= minHitsInLayersAfterExitForContinuity) {
- passesContinuityCheck = true;
- }
- System.out.println("DEBUG: Found "+hitsFoundForContinuityCheck+"/"+checkLayersAfterExitForContinuity+" hits post-exit => passesContinuityCheck="+passesContinuityCheck);
- }
-
- // Check for MIPness after exit
- boolean passesMipCheck = false;
- if (lastExitPseudoLayer >= 0) {
- // Look for at least [minMipHitsForMipCheck] MIP hits in a set of [numLayersToTestForMipCheck] layers.
- // We are allowed to skip up to [numLayersAllowedToSkipAfterExitForMipCheck] layers.
- for (int iSkip=0; iSkip<=numLayersAllowedToSkipAfterExitForMipCheck; iSkip++) {
- int mipHitsFound = 0;
- for (int iLayer=0; iLayer<numLayersToTestForMipCheck; iLayer++) {
- int pseudoLayer = iLayer + lastClumpPseudoLayer + iSkip + 1;
- if (pseudoLayer<foundMipHit.size() && foundMipHit.get(pseudoLayer)) {
- mipHitsFound++;
- }
- }
- if (mipHitsFound >= minMipHitsForMipCheck) {
- passesMipCheck = true;
- }
- if (mipHitsFound > numLayersToTestForMipCheck) { throw new AssertionError("Book-keeping error"); }
- System.out.println("DEBUG: For iSkip="+iSkip+", found "+mipHitsFound+"/"+numLayersToTestForMipCheck+" MIP hits post-exit => passesMipCheck="+passesMipCheck);
- }
- }
-
- if (passesContinuityCheck && passesMipCheck) {
- // Passes the cuts!
- overlaidMipTracks.add(tr);
- List<CalorimeterHit> matchedHits = new Vector<CalorimeterHit>();
- for (int iLayer=0; iLayer<trajectory.size(); iLayer++) {
- if (foundHit.get(iLayer)) {
- Long cellID = trajectory.get(iLayer);
- CalorimeterHit hit = allHitsMap.get(cellID);
- if (hit == null) { throw new AssertionError("Book-keeping error"); }
- if (clus.getCalorimeterHits().contains(hit)) {
- matchedHits.add(hit);
- }
- }
- }
- matchedMipSegments.put(tr, matchedHits);
- }
-
- if (!enteredClump) {
- System.out.println("DEBUG: -> ERROR: Never entered clump.");
- } else {
- if (!exitedClump) {
- System.out.println("DEBUG: -> Never exited clump.");
- } else {
- System.out.print("DEBUG: -> Last clump layer = "+lastClumpPseudoLayer);
- System.out.print(", exit layer = "+lastExitPseudoLayer);
- int layersSkippedAfterExit = lastExitPseudoLayer - lastClumpPseudoLayer;
- if (layersSkippedAfterExit < 0 && !currentlyInClump) {
- // This isn't right.
- //
- // Note that if:
- // a) We exited the clump (e.g. in layer 15) <-- lastExitPseudoLayer
- // b) We later re-entered the clump (e.g. in layer 20)
- // c) We never left the clump (e.g. ending in layer 23) <-- lastClumpPseudoLayer
- // then we may have a situation where lastExitPseudoLayer < lastClumpPseudoLayer
- // (e.g. 15 < 23). That's why we have the "!currentlyInClump" check.
- throw new AssertionError("MISCOUNT -- layersSkippedAfterExit = "+lastExitPseudoLayer+" - "+lastClumpPseudoLayer+" = "+layersSkippedAfterExit);
- }
- if (layersSkippedAfterExit > 1) {
- System.out.print(" -- skipped "+(layersSkippedAfterExit-1)+" layers after exit");
- }
- System.out.println();
- }
- }
- }
- }
- if (overlaidMipTracks.size()>0) {
- // There were some MIP tracks overlaid. Deal with this.
- Track initiallyMatchedTrack = clustersMatchedToTweakedTracks.get(clus);
- if (initiallyMatchedTrack == null) { System.out.println("DEBUG: No initially matched track."); }
- List<Track> initiallyMatchedTrackList = new Vector<Track>();
- if (initiallyMatchedTrack != null) {
- if (initiallyMatchedTrack instanceof MultipleTrackTrack) {
- for (Track tr : initiallyMatchedTrack.getTracks()) { if (tr == null) { throw new AssertionError("Null track!"); } }
- initiallyMatchedTrackList.addAll(initiallyMatchedTrack.getTracks());
- } else {
- initiallyMatchedTrackList.add(initiallyMatchedTrack);
- }
- }
- System.out.println("DEBUG: "+initiallyMatchedTrackList.size()+" initially matched track(s): ");
- for (Track tr : initiallyMatchedTrackList) {
- System.out.println("DEBUG: * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
- }
- List<Track> overlaidMipTracksInitiallyMatched = new Vector<Track>();
- List<Track> overlaidMipTracksNotInitiallyMatched = new Vector<Track>();
- for (Track tr : overlaidMipTracks) {
- if (tr == null) { throw new AssertionError("Null track!"); }
- if (initiallyMatchedTrackList.contains(tr)) {
- overlaidMipTracksInitiallyMatched.add(tr);
- } else {
- overlaidMipTracksNotInitiallyMatched.add(tr);
- }
- }
- if (overlaidMipTracksInitiallyMatched.size() + overlaidMipTracksNotInitiallyMatched.size() != overlaidMipTracks.size()) { throw new AssertionError("Book-keeping error"); }
- // If all tracks turned out to be MIPs, the cluster will wind up completely
- // track free -- we'll reflag it as a photon in that case.
- List<Track> remainingTracks = new Vector<Track>();
- for (Track tr : initiallyMatchedTrackList) {
- if (tr == null) { throw new AssertionError("Null track!"); }
- if ( ! overlaidMipTracks.contains(tr) ) {
- remainingTracks.add(tr);
- }
- }
- if (remainingTracks.size() != initiallyMatchedTrackList.size() - overlaidMipTracksInitiallyMatched.size()) { throw new AssertionError("Book-keeping error"); }
- boolean reflagClumpAsPhoton = (remainingTracks.size()==0);
- // For each track, excise the MIP hits and reflag them as a MIP segment.
- // Mark that as the new track seed.
- Set<CalorimeterHit> hitsToFlagAsMip = new HashSet<CalorimeterHit>();
- Map<Track, Cluster> newMipClusters = new HashMap<Track,Cluster>();
- for (Track tr : overlaidMipTracks) {
- if (tr == null) { throw new AssertionError("Null track!"); }
- List<CalorimeterHit> hits = matchedMipSegments.get(tr);
- BasicCluster newCluster = new BasicCluster();
- for (CalorimeterHit hit : hits) {
- if (hit == null) { throw new AssertionError("Null hit!"); }
- if (!hitsToFlagAsMip.contains(hit)) {
- // OK -- this hit was identified as belonging to this track
- // and wasn't previously assigned
- newCluster.addHit(hit);
- hitsToFlagAsMip.add(hit);
- System.out.println("DEBUG: Track that passed tests passed through a hit in layer "+getLayer(hit)+" -- added it to output cluster");
- }
- }
- if (newCluster.getCalorimeterHits().size()==0) {
- throw new AssertionError("Handle this case");
- }
- newMipClusters.put(tr, newCluster);
- }
- // Now make the remnants into a new cluster
- BasicCluster remainder = new BasicCluster();
- for (CalorimeterHit hit : clus.getCalorimeterHits()) {
- if (hit == null) { throw new AssertionError("Null hit!"); }
- if ( ! hitsToFlagAsMip.contains(hit) ) {
- remainder.addHit(hit);
- }
- }
- if (remainingTracks.size()>0 && remainder.getCalorimeterHits().size()==0) { throw new AssertionError("Handle this case!"); }
- // Replace all obsolete track <--> cluster associations.
- // Start by recording how things used to look:
- Map<Track,Cluster> oldMatches = new HashMap<Track,Cluster>();
- for (Track tr : overlaidMipTracks) {
- // 1) MIP tracks (possibly pointing to another seed)
- Cluster oldMatch = tracksMatchedToClusters.get(tr);
- if (oldMatch == null) {
- System.out.println("WARNING: Overlaid MIP track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" has null cluster match");
- }
- oldMatches.put(tr, oldMatch);
- if (oldMatch != null) {
- // 2) Other tracks (possibly non-MIP) pointing to the same seed as a MIP track
- List<Track> matchedTracks = clustersMatchedToTracks.get(oldMatch);
- for (Track otherTr : matchedTracks) {
- if (otherTr == null) { throw new AssertionError("Null track!"); }
- oldMatches.put(otherTr, oldMatch);
- }
- }
- }
- for (Track tr : remainingTracks) {
- // 3) Any other track pointing to the initial cluster
- Cluster oldMatch = tracksMatchedToClusters.get(tr);
- oldMatches.put(tr, oldMatch);
- }
- // How will they look after:
- Map<Track,Cluster> newMatches = new HashMap<Track,Cluster>();
- for (Track tr : oldMatches.keySet()) {
- if (tr == null) { throw new AssertionError("Null track!"); }
- if (overlaidMipTracks.contains(tr)) {
- Cluster mipClus = newMipClusters.get(tr);
- if (mipClus == null) { throw new AssertionError("Null MIP cluster piece!"); }
- newMatches.put(tr, mipClus);
- } else if (remainingTracks.contains(tr)) {
- // This track pointed to the cluster we're breaking
- // up but wasn't flagged as a MIP.
- if (remainder.getCalorimeterHits().size()==0) { throw new AssertionError("Empty cluster!"); }
- newMatches.put(tr, remainder);
- } else {
- // This track pointed to another cluster (that we're not
- // breaking up). One of the MIP tracks also happened to
- // point to that cluster.
- Cluster oldClus = oldMatches.get(tr);
- if (oldClus == null) { throw new AssertionError("Null old cluster!"); }
- newMatches.put(tr, oldClus);
- }
- }
- // Check for ambiguity:
- Map<Cluster, List<Track>> newMatchesReverseMap = new HashMap<Cluster, List<Track>>();
- for (Track tr : newMatches.keySet()) {
- if (tr == null) { throw new AssertionError("Null track!"); }
- Cluster matchedClus = newMatches.get(tr);
- if (matchedClus == null) { throw new AssertionError("Null matched cluster!!"); }
- List<Track> reverseMatches = newMatchesReverseMap.get(matchedClus);
- if (reverseMatches == null) {
- reverseMatches = new Vector<Track>();
- newMatchesReverseMap.put(matchedClus, reverseMatches);
- }
- reverseMatches.add(tr);
- }
- // Purge the old matching info
- System.out.println("DEBUG: Will purge old matches for "+oldMatches.keySet().size()+" tracks:");
- for (Track tr : oldMatches.keySet()) {
- if (tr == null) { throw new AssertionError("Null track!"); }
- Cluster seed = oldMatches.get(tr);
- if (seed == null) {
- System.out.println("DEBUG: Null seed for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
- } else {
- clustersMatchedToTracks.remove(seed); // OK
- clustersMatchedToTweakedTracks.remove(seed); // OK (2/2)
- }
- tracksMatchedToClusters.remove(tr); // OK
- uniquelyMatchedTracks.remove(tr); // OK
- ambiguouslyMatchedTracks.remove(tr); // OK
- Track tweakedTrack = mapOrigTrackToTweakedTrack.get(tr);
- if (tweakedTrack == null) { throw new AssertionError("Null tweaked track!"); }
- mapOrigTrackToTweakedTrack.remove(tr); // OK (2/2)
- tweakedTracks.remove(tweakedTrack); // OK (2/2)
- tweakedTracksMatchedToClusters.remove(tweakedTrack); // OK (2/2)
- System.out.print("DEBUG: Purging track with p="+(new BasicHep3Vector(tr.getMomentum()).magnitude()));
- if (tweakedTrack instanceof MultipleTrackTrack) {
- System.out.print(" from MultipleTrackTrack with "+tweakedTrack.getTracks().size()+" daughters and p="+(new BasicHep3Vector(tweakedTrack.getMomentum()).magnitude()));
- }
- if (seed != null) {
- System.out.println(" that used to point to a seed with "+seed.getCalorimeterHits().size());
- } else {
- System.out.println(" that used to point to a null seed");
- }
- }
- // Add the new matching info:
- for (Track tr : newMatches.keySet()) {
- if (tr == null) { throw new AssertionError("Null track!"); }
- Cluster seed = newMatches.get(tr);
- if (seed == null) { throw new AssertionError("Null seed!"); }
- tracksMatchedToClusters.put(tr, seed);
- }
- for (Cluster seed : newMatchesReverseMap.keySet()) {
- if (seed == null) { throw new AssertionError("Null seed!"); }
- List<Track> tracks = newMatchesReverseMap.get(seed);
- if (tracks == null) { throw new AssertionError("Null track list!"); }
- clustersMatchedToTracks.put(seed, tracks);
- if (tracks.size()==0) {
- throw new AssertionError("Book-keeping error!");
- } else if (tracks.size()==1) {
- // Unique match
- Track tr = tracks.get(0);
- if (tr == null) { throw new AssertionError("Null track!"); }
- uniquelyMatchedTracks.add(tr);
- tweakedTracks.add(tr);
- mapOrigTrackToTweakedTrack.put(tr,tr);
- clustersMatchedToTweakedTracks.put(seed, tr);
- tweakedTracksMatchedToClusters.put(tr, seed);
- System.out.println("DEBUG: Unique track match: Tweaked track with p="+(new BasicHep3Vector(tr.getMomentum()).magnitude())+" -> seed cluster with "+seed.getCalorimeterHits().size()+" hits.");
- } else {
- // Ambiguous match still
- for (Track tr : tracks) { if (tr == null) { throw new AssertionError("Null track!"); } }
- MultipleTrackTrack tweakedTrack = new MultipleTrackTrack(tracks);
- ambiguouslyMatchedTracks.add(tweakedTrack);
- tweakedTracks.add(tweakedTrack);
- for (Track tr : tracks) {
- mapOrigTrackToTweakedTrack.put(tr, tweakedTrack);
- }
- clustersMatchedToTweakedTracks.put(seed, tweakedTrack);
- tweakedTracksMatchedToClusters.put(tweakedTrack, seed);
- System.out.println("DEBUG: Ambiguous track match: Tweaked track ("+tracks.size()+" sub-tracks) with p="+(new BasicHep3Vector(tweakedTrack.getMomentum()).magnitude())+" -> seed cluster with "+seed.getCalorimeterHits().size()+" hits.");
- }
- }
- // If appropriate, make the remaining cluster pieces into a photon.
- if (remainder.getCalorimeterHits().size() > 0) {
- if (reflagClumpAsPhoton) {
- System.out.println("DEBUG: Flagged remainder of cluster ("+remainder.getCalorimeterHits().size()+" hits) as photon");
- nonSeedHadronLikePhotonClusters.add(remainder);
- modifiedPhotonClusters.add(remainder);
- } else {
- System.out.println("DEBUG: Flagged remainder of cluster ("+remainder.getCalorimeterHits().size()+" hits) as clump");
- clumps.add(remainder);
- }
- }
- // Purge the old cluster that we split up from any listings:
- mipsOld.remove(clus);
- mipsNew.remove(clus);
- mips.remove(clus);
- clumps.remove(clus);
- leftoverHitClusters.remove(clus);
- treesWithNoStructure.remove(clus);
- modifiedPhotonClusters.remove(clus);
- // Enter the new split-up clusters:
- for (Track tr : overlaidMipTracks) {
- Cluster newSeed = newMipClusters.get(tr);
- if (newSeed == null) { throw new AssertionError("Null seed!"); }
- if (newSeed.getCalorimeterHits().size()>3) {
- mipsNew.add(newSeed);
- mips.add(newSeed);
- System.out.println("DEBUG: Added new MIP seed with "+newSeed.getCalorimeterHits().size()+" hits for a track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
- } else {
- leftoverHitClusters.add(newSeed);
- System.out.println("DEBUG: Added new leftoverHitCluster seed with "+newSeed.getCalorimeterHits().size()+" hits for a track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
- }
- }
- }
- }
- System.out.print("DEBUG: AFTER: ");
- countHits(mipsOld, "mipsOld");
- countHits(mipsNew, "mipsNew");
- countHits(mips, "mips");
- countHits(clumps, "clumps");
- countHits(leftoverHitClusters, "leftoverHitClusters");
- countHits(modifiedPhotonClusters, "modifiedPhotonClusters");
- System.out.println("");
- printTracks(tweakedTracksMatchedToClusters);
- }
-
- */
-
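At the heart of the punch-through search deleted above is a single local criterion: an extrapolated hit counts as MIP-like when its 3x3 neighbourhood on the same layer (getNeighbourIDs(0, 1, 1)) contains at most maxNeighboursForMip_011 = 1 occupied cell. Restated compactly, with allHitsMap assumed to be the driver's cell-ID-to-hit lookup that the deleted code reads from:

import java.util.Map;
import org.lcsim.event.CalorimeterHit;
import org.lcsim.geometry.IDDecoder;

class MipHitSketch {
    // True when at most maxNeighbours of the 8 same-layer cells around
    // this hit carry a hit; the deleted code used maxNeighbours = 1.
    static boolean isMipLike(CalorimeterHit hit, Map<Long, CalorimeterHit> allHitsMap, int maxNeighbours) {
        IDDecoder id = hit.getIDDecoder();
        id.setID(hit.getCellID());
        int occupied = 0;
        for (long neighbourID : id.getNeighbourIDs(0, 1, 1)) { // (layer, theta, phi) window
            if (allHitsMap.get(neighbourID) != null) { occupied++; }
        }
        return occupied <= maxNeighbours;
    }
}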
// Sort matched tracks by momentum
List<Track> tracksSortedByMomentum = new Vector<Track>();
for (Track tr : tracksMatchedToClusters.keySet()) {
@@ -1392,21 +477,6 @@
Map<Track, Set<Track>> mapTrackToJet = null;
// Set up sharing
- boolean proximityShareForSmallClusters = false;
- boolean proximityAndConeShareForSmallClusters = true;
-
- boolean dTreeShareForHaloECAL = true;
- boolean proximityAndConeShareForECAL = false;
- boolean proximityAndConeAndDTreeShareForECAL = false;
- boolean coneAndDTreeShareForECAL = false;
-
- boolean dTreeShareForHaloHCAL = true;
- boolean steveMipShareForHaloHCAL = false;
- boolean taejeongMipShareForHaloHCAL = false;
- boolean proximityAndConeShareForHCAL = false;
- boolean proximityAndConeAndDTreeShareForHCAL = false;
- boolean coneAndDTreeShareForHCAL = false;
-
double maxDistanceForSmallClusters = 250.0; // 25cm isolation cut-off
double maxDistanceForDTreeClustersECAL = 150.0; // 15cm isolation (shorter since DTree sharing exists as backup)
double maxDistanceForDTreeClustersHCAL = 200.0; // 20cm isolation (shorter since DTree sharing exists as backup)
@@ -1442,7 +512,7 @@
allSharedClusters.add(sharedSmallDTreeClusters);
}
// ECAL halo
- if (dTreeShareForHaloECAL) {
+ {
DTreeClusterSharingAlgorithm dTreeSharingAlgECAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, maxDistanceForDTreeClustersECAL);
SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, dTreeSharingAlgECAL);
sharedLeftoverHitClustersECAL.createShares(linkableClusters);
[truncated at 1000 lines; 2006 more skipped]