lcsim/src/org/lcsim/contrib/uiowa
diff -u -r1.55 -r1.56
--- ReclusterDTreeDriver.java 30 Sep 2008 01:13:43 -0000 1.55
+++ ReclusterDTreeDriver.java 13 Oct 2008 06:34:31 -0000 1.56
@@ -35,13 +35,14 @@
* in this package, which uses the implementation in
* org.lcsim.recon.cluster.directedtree developed by NIU).
*
- * @version $Id: ReclusterDTreeDriver.java,v 1.55 2008/09/30 01:13:43 tjkim Exp $
+ * @version $Id: ReclusterDTreeDriver.java,v 1.56 2008/10/13 06:34:31 mcharles Exp $
* @author Mat Charles <[log in to unmask]>
*/
public class ReclusterDTreeDriver extends ReclusterDriver {
protected String m_dTreeClusterListName;
+ protected String m_muonTrackClusterMapName;
protected boolean m_cheatOnPhotonsMisidentifiedAsNeutralHadrons = false;
protected boolean m_cheatOnHadronsMisidentifiedAsPhotons = false;
@@ -106,8 +107,13 @@
}
public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList) {
+ // Dummy constructor kept to stop compiler errors; deliberately throws if ever called.
+ if (true) { throw new AssertionError("FIXME"); }
+ }
+
+ public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList, String muonTrackClusterMap, HelixExtrapolator findCluster) {
System.out.println("ReclusterDTreeDriver unstable version");
- initTrackMatch();
+ initTrackMatch(findCluster);
initCalibration();
initPlots();
m_dTreeClusterListName = dTreeClusterList;
@@ -115,6 +121,7 @@
m_mcList = mcList;
m_eval = new LikelihoodEvaluatorWrapper();
m_outputParticleListName = "DTreeReclusteredParticles";
+ m_muonTrackClusterMapName = muonTrackClusterMap;
// Look for hits near boundaries:
HitNearBarrelEndcapBoundaryDecision dec = new HitNearBarrelEndcapBoundaryDecision(6.0, 15.0, 1);
@@ -133,7 +140,9 @@
FindSubClusters clusDriverHCAL = new FindSubClusters("DTreeClustersHCAL", m_newMipFinderRadiusHCAL, m_minHitsToBeTreatedAsClusterHCAL, m_removePoorQualityMips, "OldMipsInsideTreesHCAL", "NewMipsInsideTreesHCAL", "ClumpsInsideTreesHCAL", "BlocksInsideTreesHCAL", "LeftoverHitsInsideTreesHCAL", "MapTreeToTargetsHCAL", "MapSharedToTreeHCAL");
FindSubClusters clusDriverMCAL = new FindSubClusters("DTreeClustersMCAL", m_newMipFinderRadiusMCAL, m_minHitsToBeTreatedAsClusterMCAL, m_removePoorQualityMips, "OldMipsInsideTreesMCAL", "NewMipsInsideTreesMCAL", "ClumpsInsideTreesMCAL", "BlocksInsideTreesMCAL", "LeftoverHitsInsideTreesMCAL", "MapTreeToTargetsMCAL", "MapSharedToTreeMCAL");
FindSubClusters clusDriverFCAL = new FindSubClusters("DTreeClustersFCAL", m_newMipFinderRadiusFCAL, m_minHitsToBeTreatedAsClusterFCAL, m_removePoorQualityMips, "OldMipsInsideTreesFCAL", "NewMipsInsideTreesFCAL", "ClumpsInsideTreesFCAL", "BlocksInsideTreesFCAL", "LeftoverHitsInsideTreesFCAL", "MapTreeToTargetsFCAL", "MapSharedToTreeFCAL");
- FindSubClusters clusDriverMCALforMuonID = new FindSubClusters("DTreeClustersMCALforMuonID", m_newMipFinderRadiusMCAL, m_minHitsToBeTreatedAsClusterMCAL, m_removePoorQualityMips, "OldMipsInsideTreesMCALforMuonID", "NewMipsInsideTreesMCALforMuonID", "ClumpsInsideTreesMCALforMuonID", "BlocksInsideTreesMCALforMuonID", "LeftoverHitsInsideTreesMCALforMuonID", "MapTreeToTargetsMCALforMuonID", "MapSharedToTreeMCALforMuonID");
+ /*
+ * FindSubClusters clusDriverMCALforMuonID = new FindSubClusters("DTreeClustersMCALforMuonID", m_newMipFinderRadiusMCAL, m_minHitsToBeTreatedAsClusterMCAL, m_removePoorQualityMips, "OldMipsInsideTreesMCALforMuonID", "NewMipsInsideTreesMCALforMuonID", "ClumpsInsideTreesMCALforMuonID", "BlocksInsideTreesMCALforMuonID", "LeftoverHitsInsideTreesMCALforMuonID", "MapTreeToTargetsMCALforMuonID", "MapSharedToTreeMCALforMuonID");
+ */
if (m_oldMipFinderCrossesTrees) {
clusDriverECAL.enableBarrelEndcapCrossing("EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary");
clusDriverHCAL.enableBarrelEndcapCrossing("HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary");
@@ -142,7 +151,9 @@
clusDriverECAL.setNNrange(1,1,1);
clusDriverHCAL.setNNrange(2,2,1);
clusDriverMCAL.setNNrange(2,2,1);
- clusDriverMCALforMuonID.setNNrange(2,2,1);
+ /*
+ * clusDriverMCALforMuonID.setNNrange(2,2,1);
+ */
clusDriverFCAL.setNNrange(1,1,1);
}
add(clusDriverECAL);
@@ -166,12 +177,14 @@
add(new TransientFlagDriver("ClumpsInsideTreesMCAL"));
add(new TransientFlagDriver("BlocksInsideTreesMCAL"));
add(new TransientFlagDriver("LeftoverHitsInsideTreesMCAL"));
- add(clusDriverMCALforMuonID);
- add(new TransientFlagDriver("OldMipsInsideTreesMCALforMuonID"));
- add(new TransientFlagDriver("NewMipsInsideTreesMCALforMuonID"));
- add(new TransientFlagDriver("ClumpsInsideTreesMCALforMuonID"));
- add(new TransientFlagDriver("BlocksInsideTreesMCALforMuonID"));
- add(new TransientFlagDriver("LeftoverHitsInsideTreesMCALforMuonID"));
+ /*
+ * add(clusDriverMCALforMuonID);
+ * add(new TransientFlagDriver("OldMipsInsideTreesMCALforMuonID"));
+ * add(new TransientFlagDriver("NewMipsInsideTreesMCALforMuonID"));
+ * add(new TransientFlagDriver("ClumpsInsideTreesMCALforMuonID"));
+ * add(new TransientFlagDriver("BlocksInsideTreesMCALforMuonID"));
+ * add(new TransientFlagDriver("LeftoverHitsInsideTreesMCALforMuonID"));
+ */
}
if (m_useFcal) {
add(clusDriverFCAL);
@@ -190,12 +203,6 @@
// Some likelihood selectors need per-event info
supplyEventToLikelihoodSelectors();
- // Steve's pre-shower MIP-finder
- {
- SteveMipWrapper tmpWrapper = new SteveMipWrapper();
- tmpWrapper.process(event);
- }
-
if (m_oldMipFinderCrossesTrees) {
m_allowComponentsToStraddleLargeClusters = true;
}
@@ -203,12 +210,13 @@
// Read in
List<Cluster> dTreeClusters = event.get(Cluster.class, m_dTreeClusterListName);
List<Track> trackList = event.get(Track.class, m_inputTrackList);
- Map<Track,Set<Cluster>> newMapTrackToMip = ((Map<Track, Set<Cluster>>)(event.get("newMapTrackToMip")));
- List<Track> muonTracks = new Vector<Track>();
- for(Track tr : newMapTrackToMip.keySet()) { muonTracks.add(tr);}
- boolean sub_muon = trackList.removeAll(muonTracks);
- if(sub_muon) { System.out.println("track list changed due to the muon list");}
- else { System.out.println("There is no muon track for this event"); }
+ System.out.println("DEBUG: "+this.getClass().getName()+" read in list of "+trackList.size()+" tracks named "+m_inputTrackList);
+ //TEST-MUON//Map<Track,Set<Cluster>> newMapTrackToMip = ((Map<Track, Set<Cluster>>)(event.get("newMapTrackToMip")));
+ //TEST-MUON//List<Track> muonTracks = new Vector<Track>();
+ //TEST-MUON//for(Track tr : newMapTrackToMip.keySet()) { muonTracks.add(tr);}
+ //TEST-MUON//boolean sub_muon = trackList.removeAll(muonTracks);
+ //TEST-MUON//if(sub_muon) { System.out.println("track list changed due to the muon list");}
+ //TEST-MUON//else { System.out.println("There is no muon track for this event"); }
List<Cluster> photons = event.get(Cluster.class, "PhotonClustersForDTree");
List<Cluster> largeClusters = dTreeClusters; // FIXME: NOT IDEAL! Perhaps run MST on DTree clusters?
if (trackList == null) { throw new AssertionError("Null track list!"); }
@@ -288,15 +296,33 @@
mips.addAll(mipsOld);
mips.addAll(mipsNew);
- //For muon ID
- List<Cluster> mipsMuon = new Vector<Cluster>();
- mipsMuon.addAll(event.get(Cluster.class, "OldMipsInsideTreesMCALforMuonID"));
- mipsMuon.addAll(event.get(Cluster.class, "NewMipsInsideTreesMCALforMuonID"));
- mipsMuon.addAll(event.get(Cluster.class, "ClumpsInsideTreesMCALforMuonID"));
- if (m_debug) {
- System.out.println("Found "+mips.size()+" mips, "+clumps.size()+" clumps, "+photons.size()+" photons, "+leftoverHitClusters.size()+" leftover-hit-clusters, "+treesWithNoStructure.size()+" large DTrees with no structure, and "+trackList.size()+" tracks in event.");
+
+ // TEST
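+ // Re-inject photon clusters that were vetoed for overlapping muon/MIP hits:
+ // large ones (more than 5 hits) go back in as clumps, small ones as structureless
+ // trees, so charged showers can still pick them up. Any that end up unused are
+ // re-flagged as photons later (see the vetoedPhotons handling in makeParticlesAndWriteOut).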
+ boolean vetoThenRetainPhotons = true;
+ List<Cluster> vetoedPhotons = new Vector<Cluster>();
+ if (vetoThenRetainPhotons) {
+ vetoedPhotons = event.get(Cluster.class, "VetoedPhotonClustersMinusMuonHitsAndMipHits");
+ for (Cluster clus : vetoedPhotons) {
+ if (clus.getCalorimeterHits().size()>5) {
+ clumps.add(clus);
+ } else {
+ treesWithNoStructure.add(clus);
+ }
+ }
}
+
+ /*
+ * //For muon ID
+ * List<Cluster> mipsMuon = new Vector<Cluster>();
+ * mipsMuon.addAll(event.get(Cluster.class, "OldMipsInsideTreesMCALforMuonID"));
+ * mipsMuon.addAll(event.get(Cluster.class, "NewMipsInsideTreesMCALforMuonID"));
+ * mipsMuon.addAll(event.get(Cluster.class, "ClumpsInsideTreesMCALforMuonID"));
+ * if (m_debug) {
+ * System.out.println("Found "+mips.size()+" mips, "+clumps.size()+" clumps, "+photons.size()+" photons, "+leftoverHitClusters.size()+" leftover-hit-clusters, "+treesWithNoStructure.size()+" large DTrees with no structure, and "+trackList.size()+" tracks in event.");
+ * }
+ */
+
// Also load book-keeping maps that record what subclusters or hits
// belong to what tree:
Map<Cluster, List<Cluster>> targetsInTreeECAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsECAL")));
@@ -344,13 +370,146 @@
Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
+ List<Track> preLinkedTracks = new Vector<Track>();
+ boolean m_useMipInfo = true;
+ if (m_useMipInfo) {
+ // Read in MIP connections
+ Map<Track,BasicCluster> MapTrkToMIP = (Map<Track,BasicCluster>)(event.get("ShowerFinderMapTrackToMip"));
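+ // (Read back from the event; the ShowerPointFinder call that used to build this
+ // map here is now done earlier -- see the commented-out block further down.)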
+
+ // Now, each track is connected to a MIP. But some of these MIP clusters
+ // may overlap. We need to identify the cases when that happens and then
+ // * produce a merged cluster
+ // * have each of the tracks pointing to the same merged cluster
+ // First, check for overlaps...
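+ // Example: if track t1 -> MIP A and track t2 -> MIP B, and A and B share a
+ // calorimeter hit, then A and B land in the same overlap set below and are
+ // replaced by one merged cluster that both t1 and t2 point to.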
+ Map<Cluster,Track> mapMipToTrack = new HashMap<Cluster,Track>();
+ Map<Cluster,Cluster> mapMipToMergedCluster = new HashMap<Cluster,Cluster>();
+ Map<Cluster,List<Track>> mapMergedClusterToTracks = new HashMap<Cluster,List<Track>>();
+
+ // Find hits for each MIP & which clusters they're inside
+ Map<CalorimeterHit,Set<Cluster>> hitMipMap = new HashMap<CalorimeterHit,Set<Cluster>>();
+ for (Track tr : MapTrkToMIP.keySet()) {
+ BasicCluster mip = MapTrkToMIP.get(tr);
+ mapMipToTrack.put(mip,tr);
+ for (CalorimeterHit hit : mip.getCalorimeterHits()) {
+ Set<Cluster> mipsOfHit = hitMipMap.get(hit);
+ if (mipsOfHit == null) {
+ mipsOfHit = new HashSet<Cluster>();
+ hitMipMap.put(hit, mipsOfHit);
+ }
+ mipsOfHit.add(mip);
+ }
+ }
+ // Requirements for this List:
+ // * Each cluster appears in exactly one Set
+ List<Set<Cluster>> mipOverlapSets = new Vector<Set<Cluster>>();
+ // Start filling the List:
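+ // For each hit, union the set of MIPs touching that hit with any existing
+ // sets that already contain one of them (a simple connected-components merge).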
+ for (CalorimeterHit hit : hitMipMap.keySet()) {
+ Set<Cluster> touchedClusters = hitMipMap.get(hit);
+ Set<Set<Cluster>> oldLinkedClusterSets = new HashSet<Set<Cluster>>();
+ for (Cluster clus : touchedClusters) {
+ for (Set<Cluster> currentSet : mipOverlapSets) {
+ if (currentSet.contains(clus)) {
+ oldLinkedClusterSets.add(currentSet);
+ }
+ }
+ }
+ Set<Cluster> newLinkedClusterSet = new HashSet<Cluster>();
+ newLinkedClusterSet.addAll(touchedClusters);
+ for (Set<Cluster> oldSet : oldLinkedClusterSets) {
+ newLinkedClusterSet.addAll(oldSet);
+ mipOverlapSets.remove(oldSet);
+ }
+ mipOverlapSets.add(newLinkedClusterSet);
+ }
+ // Verify that requirement above is true, i.e.
+ // * Each cluster appears in exactly one Set
+ List<Cluster> countedClusterList = new Vector<Cluster>();
+ Set<Cluster> countedClusterSet = new HashSet<Cluster>();
+ for (Set<Cluster> currentSet : mipOverlapSets) {
+ countedClusterList.addAll(currentSet);
+ countedClusterSet.addAll(currentSet);
+ }
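+ // The List counts clusters with multiplicity, the Set without duplicates; both
+ // must match the number of track->MIP entries, so every MIP cluster appears in
+ // exactly one overlap set and none appears twice.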
+ if (countedClusterList.size() != MapTrkToMIP.size()) { throw new AssertionError("Book-keeping error"); }
+ if (countedClusterSet.size() != MapTrkToMIP.size()) { throw new AssertionError("Book-keeping error"); }
+ // Do merge
+ for (Set<Cluster> currentSet : mipOverlapSets) {
+ if (currentSet.size()==0) {
+ throw new AssertionError("Empty set!");
+ } else if (currentSet.size()==1) {
+ Cluster mip = currentSet.iterator().next();
+ mapMipToMergedCluster.put(mip,mip);
+ Track tr = mapMipToTrack.get(mip);
+ List<Track> mergedTracks = new Vector<Track>();
+ mergedTracks.add(tr);
+ mapMergedClusterToTracks.put(mip, mergedTracks);
+ } else {
+ BasicCluster mergedMip = new BasicCluster();
+ List<Track> mergedTracks = new Vector<Track>();
+ Set<CalorimeterHit> mergedHits = new HashSet<CalorimeterHit>();
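+ // Collect hits through a Set so hits shared by overlapping MIPs are only
+ // added once to the merged cluster.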
+ for (Cluster mip : currentSet) {
+ mergedHits.addAll(mip.getCalorimeterHits());
+ Track tr = mapMipToTrack.get(mip);
+ mergedTracks.add(tr);
+ }
+ for (CalorimeterHit hit : mergedHits) {
+ mergedMip.addHit(hit);
+ }
+ for (Cluster clus : currentSet) {
+ mapMipToMergedCluster.put(clus, mergedMip);
+ mapMergedClusterToTracks.put(mergedMip, mergedTracks);
+ }
+ }
+ }
+
+ // Assign MIPs to tracks, taking overlaps into account
+ for (Cluster mergedMip : mapMergedClusterToTracks.keySet()) {
+ List<Track> tracks = mapMergedClusterToTracks.get(mergedMip);
+ if (tracks == null) { throw new AssertionError("Null tracks!"); }
+ if (tracks.size()==0) {
+ throw new AssertionError("Empty track list!");
+ } else if (tracks.size()==1) {
+ // Unique
+ Track tr = tracks.get(0);
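+ // Only hard-link the track when the MIP has more than 5 hits; smaller
+ // fragments go back into the leftover-hit pool instead.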
+ if (mergedMip.getCalorimeterHits().size() > 5) {
+ // Found a good MIP
+ System.out.println("DEBUG: Good pre-shower MIP with "+mergedMip.getCalorimeterHits().size()+" hits -- adding...");
+ mipsOld.add(mergedMip);
+ mips.add(mergedMip);
+ tracksMatchedToClusters.put(tr, mergedMip);
+ clustersMatchedToTracks.put(mergedMip, tracks);
+ preLinkedTracks.add(tr);
+ } else {
+ // Didn't find a good MIP
+ System.out.print("DEBUG: Dodgy pre-shower MIP with only "+mergedMip.getCalorimeterHits().size()+" hits -- not adding. Hits were:");
+ for (CalorimeterHit hit : mergedMip.getCalorimeterHits()) {
+ System.out.print(" "+hit.getCellID());
+ }
+ System.out.println();
+ leftoverHitClusters.add(mergedMip);
+ }
+ } else {
+ // Overlap -- can't treat it as a MIP.
+ System.out.println("DEBUG: Overlapping pre-shower MIP with "+mergedMip.getCalorimeterHits().size()+" hits matched to "+tracks.size()+" tracks.");
+ treesWithNoStructure.add(mergedMip);
+ }
+ }
+ }
+
+
+ // For convenience, keep a separate list of the tracks that were
+ // hard-linked to a MIP above and of those that were not.
+ List<Track> nonPreLinkedTracks = new Vector<Track>();
+ nonPreLinkedTracks.addAll(trackList);
+ nonPreLinkedTracks.removeAll(preLinkedTracks);
+
// Handle photons
List<Cluster> chargedHadronLikePhotons = new Vector<Cluster>();
List<Cluster> modifiedPhotonClusters = new Vector<Cluster>();
List<Cluster> photonLikePhotons = new Vector<Cluster>();
List<Cluster> electronClusters = new Vector<Cluster>();
List<Track> electronTracks = new Vector<Track>();
- photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, trackList, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
+ //photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, trackList, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
+ photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, nonPreLinkedTracks, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
// Resume track matching
List<Cluster> allMatchableClusters = new Vector<Cluster>();
@@ -365,7 +524,7 @@
if (m_debug) { System.out.println("Attempting to match "+allMatchableClusters.size()+" matchable clusters to "+trackList.size()+" tracks"); }
for (Track tr : trackList) {
- if (electronTracks.contains(tr)) {
+ if (preLinkedTracks.contains(tr) || electronTracks.contains(tr)) {
continue; // Those are already assigned!
}
Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, allMatchableClusters);
@@ -468,6 +627,8 @@
clustersMatchedToTracks.put(matchedCluster, clusTrList);
}
clusTrList.add(tr);
+ } else {
+ System.out.println("DEBUG: Failed to match track with p="+momentum(tr).magnitude());
}
}
@@ -504,6 +665,8 @@
}
}
+ System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:"); // FIXME
+
if (m_debug) {
System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
for (Track tr : unmatchedTracks) {
@@ -511,48 +674,16 @@
}
}
- ShowerPointFinder showerFinder = new ShowerPointFinder(m_findCluster, allHits, tracksMatchedToClusters);
- Map<Track,BasicCluster> MapTrkToMIP = showerFinder.findMips();
- event.put("ShowerFinderMapTrackToMip", MapTrkToMIP);
- List<Cluster> preShowerMips = new Vector<Cluster>();
- preShowerMips.addAll(MapTrkToMIP.values());
- event.put("ShowerFinderMips", preShowerMips);
- event.getMetaData(preShowerMips).setTransient(true);
-
- if(m_muonDebug) {
- aida.cloud1D("muon/muon size",100000).fill(muonTracks.size());
- System.out.println("size of muon track= " + muonTracks.size());
- for(Track tr : muonTracks){
- List<MCParticle> m_truthList = getMCParticle(tr);
- Hep3Vector track = (new BasicHep3Vector(tr.getMomentum()));
- double p = track.magnitude();
- double pT = Math.sqrt(track.x()*track.x() + track.y()*track.y());
- double costheta = Math.abs(track.z()/p);
-
- aida.cloud1D("muon/reconstructed muon").fill(p);
- aida.cloud1D("muon/reconstructed pT muon").fill(pT);
- aida.cloud1D("muon/reconstructed costheta").fill(costheta);
-
- boolean goodmatch = true;
- for(MCParticle part : m_truthList){
- if(part.getPDGID() == 13 || part.getPDGID() == -13) {
- goodmatch = true;
- break;
- }
- goodmatch = false;
- }
- if(goodmatch) {
- aida.cloud1D("muon/good matching muon").fill(p);
- aida.cloud1D("muon/good matching pT muon").fill(pT);
- aida.cloud1D("muon/good matching costheta").fill(costheta);
- }
- else {
- aida.cloud1D("muon/not good matching muon").fill(p);
- aida.cloud1D("muon/not good matching pT muon").fill(pT);
- aida.cloud1D("muon/not good matching costheta").fill(costheta);
- }
- }
- }
+ // Now done earlier:
+ /*
+ ShowerPointFinder showerFinder = new ShowerPointFinder(m_findCluster, allHits, tracksMatchedToClusters);
+ Map<Track,BasicCluster> MapTrkToMIP = showerFinder.findMips();
+ event.put("ShowerFinderMapTrackToMip", MapTrkToMIP);
+ List<Cluster> preShowerMips = new Vector<Cluster>();
+ preShowerMips.addAll(MapTrkToMIP.values());
+ event.put("ShowerFinderMips", preShowerMips);
+ event.getMetaData(preShowerMips).setTransient(true);
+ */
//Electron write out
event.put("ElectronList", electronTracks);
@@ -1407,7 +1538,7 @@
// Any structure?
Cluster tree = treeOfSharedCluster.get(clus);
List<Cluster> targetsInThisTree = targetsInTree.get(tree);
- if (targetsInThisTree.size() > 0) {
+ if (targetsInThisTree != null && targetsInThisTree.size() > 0) {
// These "leftover hit clusters" can include a LOT of halo hits from
// inside a DTree... need to break them into bite-sized pieces for
// the sharing algorithm
@@ -1434,6 +1565,7 @@
}
} else {
// No structure
+ // or target list was null because cluster was part of a pre-shower MIP
smallClustersToShare.add(clus); // TEST
}
}
@@ -1555,6 +1687,46 @@
proximityAndConeAlg.addAlgorithm(new ConeClusterSharingAlgorithm(0.95, 0.90));
}
SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAndConeAlg);
+ if (m_debug) {
+ System.out.println("DEBUG: Dumping smallClustersToShare:");
+ int tmpMaxSize = 0;
+ for (Cluster clus : smallClustersToShare) {
+ System.out.print("DEBUG: * clus with "+clus.getCalorimeterHits().size()+" hits");
+ for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+ System.out.print(" "+hit.getCellID());
+ }
+ if (linkableClusters.contains(clus)) { System.out.print(" --[linkable]"); }
+ if (leftoverHitClusters.contains(clus)) { System.out.print(" --[leftoverHitClusters]"); }
+ if (mips.contains(clus)) { System.out.print(" --[mips]"); }
+ if (clumps.contains(clus)) { System.out.print(" --[clumps]"); }
+ if (treesWithNoStructure.contains(clus)) { System.out.print(" --[treesWithNoStructure]"); }
+ if (seeds.contains(clus)) { System.out.print(" --[seeds]"); }
+ if (modifiedPhotonClusters.contains(clus)) { System.out.print(" --[modifiedPhotonClusters]"); }
+ System.out.println();
+ if (clus.getCalorimeterHits().size() > tmpMaxSize) { tmpMaxSize = clus.getCalorimeterHits().size(); }
+ }
+ System.out.println("DEBUG: Dumping linkableClusters:");
+ for (Cluster clus : linkableClusters) {
+ System.out.print("DEBUG: * clus with "+clus.getCalorimeterHits().size()+" hits");
+ if (clus.getCalorimeterHits().size() <= tmpMaxSize) {
+ for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+ System.out.print(" "+hit.getCellID());
+ }
+ }
+ if (linkableClusters.contains(clus)) { System.out.print(" --[linkable]"); }
+ if (leftoverHitClusters.contains(clus)) { System.out.print(" --[leftoverHitClusters]"); }
+ if (mips.contains(clus)) { System.out.print(" --[mips]"); }
+ if (mipsNew.contains(clus)) { System.out.print(" --[mipsNew]"); }
+ if (mipsOld.contains(clus)) { System.out.print(" --[mipsOld]"); }
+ if (clumps.contains(clus)) { System.out.print(" --[clumps]"); }
+ if (treesWithNoStructure.contains(clus)) { System.out.print(" --[treesWithNoStructure]"); }
+ if (seeds.contains(clus)) { System.out.print(" --[seeds]"); }
+ if (modifiedPhotonClusters.contains(clus)) { System.out.print(" --[modifiedPhotonClusters]"); }
+ if (event.get(Cluster.class, "OldMipsInsideTreesECAL").contains(clus)) { System.out.print(" --[OldMipsInsideTreesECAL]"); }
+ if (event.get(Cluster.class, "NewMipsInsideTreesECAL").contains(clus)) { System.out.print(" --[NewMipsInsideTreesECAL]"); }
+ System.out.println();
+ }
+ }
sharedSmallDTreeClusters.createShares(linkableClusters);
sharedSmallDTreeClusters.rebuildHints();
allSharedClusters.add(sharedSmallDTreeClusters);
@@ -1851,7 +2023,7 @@
// Outputs
makeParticlesAndWriteOut(trackList, tracksSortedByMomentum, unmatchedTracksThatDontReachCalorimeter, mapOrigTrackToTweakedTrack,
tracksMatchedToClusters, clustersMatchedToTracks,
- electronTracks, newMapTrackToMip, modifiedPhotonClusters,electronClusters, photonLikePhotons, chargedHadronLikePhotons,
+ electronTracks, modifiedPhotonClusters,electronClusters, photonLikePhotons, chargedHadronLikePhotons, vetoedPhotons,
seedHadronLikePhotonClusters, nonSeedHadronLikePhotonClusters, nonSeedPhotonLikePhotonClusters,
newMapTrackToShowerComponents, newMapShowerComponentToTrack,
linkableClusters,
@@ -2721,133 +2893,6 @@
}
}
- void scanForMuons(List<Track> trackList, Map<Track, BasicCluster> mipmap, List<Cluster> mipsMuon, List<Track> muonTracks){
- for(Track tr : trackList){
- if(m_muonDebug){ System.out.println("start scanForMuons for this track");}
- List<MCParticle> m_truthList = getMCParticle(tr);
- Hep3Vector track = (new BasicHep3Vector(tr.getMomentum()));
- double p = track.magnitude();
- double pT = Math.sqrt(track.x()*track.x() + track.y()*track.y());
- double costheta = Math.abs(track.z()/p);
- HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
- Cluster mipcluster = mipmap.get(tr);
-
- int layer = -1 ;
- String lastsubdetName = "Not found";
- Hep3Vector hitpos = new BasicHep3Vector();
- Hep3Vector tangent = new BasicHep3Vector();
- Hep3Vector tangentUnit = new BasicHep3Vector();
-
- if(mipcluster == null || result == null) {
- if(m_muonDebug){
- if(mipcluster == null) aida.cloud1D("muon/mipcluster null").fill(pT);
- if(result == null) aida.cloud1D("muon/result null").fill(pT);
- if(result == null && mipcluster == null) aida.cloud1D("muon/both bull").fill(pT);
- }
- if(pT > 1){//use intercept point or IP to get tangent direction
- if( result != null){
- Hep3Vector interceptPoint = result.getInterceptPoint();
- tangent = result.getTangent(interceptPoint);
- tangentUnit = VecOp.unit(tangent);
- }else{
- tangent = track;
- tangentUnit = VecOp.unit(tangent);
- }
- }else { continue; } // don't want to use this track to match with cluster in Muon Det.
- }else if(mipcluster.getCalorimeterHits().size() > 0){
- CalorimeterHit hit = mipcluster.getCalorimeterHits().get(mipcluster.getCalorimeterHits().size()-1);
- layer = getLayer(hit);
- lastsubdetName = hit.getSubdetector().getName();
- hitpos = new BasicHep3Vector(hit.getPosition());
- tangent = result.getTangent(hitpos);
- tangentUnit = VecOp.unit(tangent);
- } else {
- System.out.println("Error: Has both mipcluster and extrapolation but no tangent");
- continue;
- }
-
- if( mipsMuon != null){
- double bestmatch = -1;
- for(Cluster mipMuon : mipsMuon){
- if( mipMuon.getCalorimeterHits().size() > 1){
-
- CalorimeterHit muonhit1 = mipMuon.getCalorimeterHits().get(mipMuon.getCalorimeterHits().size()-1);
- CalorimeterHit muonhit0 = mipMuon.getCalorimeterHits().get(0);
- Hep3Vector muonpos1 = new BasicHep3Vector(muonhit1.getPosition());
- Hep3Vector muonpos0 = new BasicHep3Vector(muonhit0.getPosition());
- Hep3Vector trackMuon = new BasicHep3Vector();
- if(muonpos0.magnitude() > muonpos1.magnitude()){
- trackMuon = VecOp.sub(muonpos0, muonpos1);
- }else{
- trackMuon = VecOp.sub(muonpos1, muonpos0);
- }
- Hep3Vector trackMuonUnit = VecOp.unit(trackMuon);
- double cos = VecOp.dot(tangentUnit, trackMuonUnit);
- if(m_muonDebug){
- if (cos < 0) {//how come direction is opposite?
- System.out.println("tan: "+tangentUnit.x()+" "+tangentUnit.y()+" "+tangentUnit.z());
- System.out.println("muon: "+trackMuonUnit.x()+" "+trackMuonUnit.y()+" "+trackMuonUnit.z());
- System.out.println("cos: " + cos );
- }
- }
- if( cos > bestmatch) { bestmatch = cos; }
- }else { System.out.println ("This muon mip has 0 number of hits!");}
- }
- if(m_muonDebug){
- aida.cloud1D("muon/match cos distribution",10000).fill(bestmatch);
- }
-
- double r = Math.sqrt(hitpos.x()*hitpos.x() + hitpos.y()*hitpos.y());
- double z = hitpos.z();
- if(m_muonDebug) {
- System.out.println("Muon Debug: r= " + r + " z= " + z + " Det= " + lastsubdetName + " layer= " + layer);
- System.out.println("Cos theta= " + bestmatch);
- }
-
- boolean match = bestmatch > 0.8;
-
- for(MCParticle part : m_truthList){
- if(part.getPDGID() == 13 || part.getPDGID() == -13) {
- if(m_muonDebug){
- aida.cloud1D("muon/FastTrack muon").fill(p);
- aida.cloud1D("muon/FastTrack pT muon").fill(pT);
- aida.cloud1D("muon/FastTrack costheta").fill(costheta);
- }
- }
- }
-
- if(match){
- muonTracks.add(tr);
- boolean goodmatch = true;
- if(m_muonDebug) {
- aida.cloud1D("muon/reconstructed muon").fill(p);
- aida.cloud1D("muon/reconstructed pT muon").fill(pT);
- aida.cloud1D("muon/reconstructed costheta").fill(costheta);
- }
- for(MCParticle part : m_truthList){
- if(part.getPDGID() == 13 || part.getPDGID() == -13) {
- goodmatch = true;
- break;
- }
- goodmatch = false;
- }
- if(m_muonDebug){
- if(goodmatch) {
- aida.cloud1D("muon/good matching muon").fill(p);
- aida.cloud1D("muon/good matching pT muon").fill(pT);
- aida.cloud1D("muon/good matching costheta").fill(costheta);
- }
- else {
- aida.cloud1D("muon/not good matching muon").fill(p);
- aida.cloud1D("muon/not good matching pT muon").fill(pT);
- aida.cloud1D("muon/not good matching costheta").fill(costheta);
- }
- }
- }
- }else { System.out.println ("No muon mip cluster found : Null");}
- }
- }
-
void scanForElectrons(List<Track> trackList, List<Cluster> photons, List<Cluster> electronClusters, List<Track> electronTracks) {
Map<Track,Cluster> electronCandidateMap = new HashMap<Track,Cluster>();
Set<Cluster> electronCandidateClusters = new HashSet<Cluster>();
@@ -3059,6 +3104,7 @@
findStructureInsideCluster(piece, true, mipClustersOld, mipClustersNew, clumpClusters, unusedHits);
if (mipClustersOld.size()>0 || mipClustersNew.size()>0 || clumpClusters.size()>0) {
// Found substructure
+ System.out.println("DEBUG: Splitting photon seed -- adding "+mipClustersOld.size()+" mipClustersOld + "+mipClustersNew.size()+" mipClustersNew");
mipsOld.addAll(mipClustersOld);
mipsNew.addAll(mipClustersNew);
mips.addAll(mipClustersOld);
@@ -3514,7 +3560,7 @@
List<Track> trackList, List<Track> tracksSortedByMomentum, List<Track> unmatchedTracksThatDontReachCalorimeter,
Map<Track, Track> mapOrigTrackToTweakedTrack,
Map<Track,Cluster> tracksMatchedToClusters, Map<Cluster, List<Track>> clustersMatchedToTracks,
- List<Track> electronTracks, Map<Track, Set<Cluster>> newMapTrackToMip, List<Cluster> modifiedPhotonClusters, List<Cluster> electronClusters, List<Cluster> photonLikePhotons, List<Cluster> chargedHadronLikePhotons,
+ List<Track> electronTracks, List<Cluster> modifiedPhotonClusters, List<Cluster> electronClusters, List<Cluster> photonLikePhotons, List<Cluster> chargedHadronLikePhotons, List<Cluster> vetoedPhotons,
List<Cluster> seedHadronLikePhotonClusters, List<Cluster> nonSeedHadronLikePhotonClusters, List<Cluster> nonSeedPhotonLikePhotonClusters,
Map<Track, Set<Cluster>> newMapTrackToShowerComponents, Map<Cluster, Track> newMapShowerComponentToTrack,
List<Cluster> linkableClusters,
@@ -3590,6 +3636,35 @@
Set<Track> punchThroughTracks = new HashSet<Track>();
Set<Set<Track>> punchThroughJets = new HashSet<Set<Track>>();
+ // Muons (easiest case)
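+ // Each entry in the map pairs a muon-identified track with its cluster; build
+ // one ReconstructedParticle per entry, using the track momentum and the muon
+ // mass for the four-vector.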
+ Map<Track,Cluster> muonTrackClusterMap = (Map<Track,Cluster>)(m_event.get(m_muonTrackClusterMapName));
+ for (Track tr : muonTrackClusterMap.keySet()) {
+ Cluster clus = muonTrackClusterMap.get(tr);
+ BaseReconstructedParticle part = new BaseReconstructedParticle();
+ part.addTrack(tr);
+ part.addCluster(clus);
+ part.setCharge(tr.getCharge());
+ Hep3Vector trackMomentum = momentum(tr);
+ double trackMomentumMag = trackMomentum.magnitude();
+ double currentParticleMass = mass_muon;
+ double trackEnergySq = trackMomentumMag*trackMomentumMag + currentParticleMass*currentParticleMass;
+ HepLorentzVector fourMomentum = new BasicHepLorentzVector(Math.sqrt(trackEnergySq), trackMomentum);
+ part.set4Vector(fourMomentum);
+ part.setMass(currentParticleMass);
+ part.setReferencePoint(new BasicHep3Vector(tr.getReferencePoint()));
+ if (part.getCharge()>0) {
+ part.addParticleID(pid_muplus);
+ part.setParticleIdUsed(pid_muplus);
+ } else {
+ part.addParticleID(pid_muminus);
+ part.setParticleIdUsed(pid_muminus);
+ }
+ outputParticleList.add(part);
+ outputChargedParticleList.add(part);
+ outputChargedParticleListWithEoverPveto.add(part);
+ outputChargedParticleListWithEoverPveto_pass.add(part);
+ }
+
if (m_clusterAsJets) {
for (Set<Track> jet : jets) {
// If no E/p veto, treat all as tracks:
@@ -3679,56 +3754,6 @@
}
// Write out charged particles:
- // Write out muon particles:
- for (Track tr : newMapTrackToMip.keySet()) {
- List<Cluster> clustersOfTrack = new Vector<Cluster>();
- for (Cluster clus : newMapTrackToMip.get(tr)) { clustersOfTrack.add(clus); }
- // Make ReconstructedParticle. May be more than one if MultipleTrackTrack.
- List<BaseReconstructedParticle> particles = new Vector<BaseReconstructedParticle>();
- if (tr instanceof MultipleTrackTrack) {
- for (Track subtrack : tr.getTracks()) {
- BaseReconstructedParticle part = new BaseReconstructedParticle();
- part.addTrack(subtrack);
- particles.add(part);
- }
- } else {
- BaseReconstructedParticle part = new BaseReconstructedParticle();
- part.addTrack(tr);
- particles.add(part);
- }
-
- if (particles.size() < 1) { throw new AssertionError("Book-keeping error"); }
- // Assign clusters to first particle in list
- for (Cluster clus : clustersOfTrack) {
- particles.get(0).addCluster(clus);
- }
- // Make particles properly
- for (BaseReconstructedParticle part : particles) {
- if (part.getTracks().size() != 1) { throw new AssertionError("Book-keeping error"); }
- Track trackOfThisParticle = part.getTracks().get(0);
- part.setCharge(trackOfThisParticle.getCharge());
- Hep3Vector trackMomentum = momentum(trackOfThisParticle);
- double trackMomentumMag = trackMomentum.magnitude();
- double currentParticleMass = mass_muon;
- double trackEnergySq = trackMomentumMag*trackMomentumMag + currentParticleMass*currentParticleMass;
- HepLorentzVector fourMomentum = new BasicHepLorentzVector(Math.sqrt(trackEnergySq), trackMomentum);
- part.set4Vector(fourMomentum);
- part.setMass(currentParticleMass);
- part.setReferencePoint(new BasicHep3Vector(trackOfThisParticle.getReferencePoint()));
- if (part.getCharge()>0) {
- part.addParticleID(pid_muplus);
- part.setParticleIdUsed(pid_muplus);
- } else {
- part.addParticleID(pid_muminus);
- part.setParticleIdUsed(pid_muminus);
- }
- }
-
- outputParticleList.addAll(particles);
- outputChargedParticleList.addAll(particles);
- outputChargedParticleListWithEoverPveto.addAll(particles);
- outputChargedParticleListWithEoverPveto_pass.addAll(particles);
- }
// FIXME: Break up MultipleTrackTrack tracks.
for (Track tr : tracksSortedByMomentum) {
@@ -3969,6 +3994,17 @@
Map<Cluster, Cluster> neutralClusterCoreOfCluster = new HashMap<Cluster, Cluster>();
Set<Cluster> unusedUnmatchedClusterPieces = new HashSet<Cluster>();
unusedUnmatchedClusterPieces.addAll(unmatchedClusterPieces);
+ // If a photon-like cluster was vetoed because of a nearby MIP but was
+ // never used in a charged shower, re-flag it as a photon.
+ Set<Cluster> extraClustersToTreatAsPhotons = new HashSet<Cluster>();
+ for (Cluster clus : vetoedPhotons) {
+ if (unusedUnmatchedClusterPieces.contains(clus)) {
+ // Treat this one as a photon instead
+ unusedUnmatchedClusterPieces.remove(clus);
+ extraClustersToTreatAsPhotons.add(clus);
+ System.out.println("DEBUG: Will treat cluster of "+clus.getCalorimeterHits().size()+" hits as a photon because it was initially flagged as one, then vetoed for proximity to a MIP, then never used in a charged shower.");
+ }
+ }
double threshold = 0.7;
int crosscheckCount = unusedUnmatchedClusterPieces.size();
for (Cluster clus : unmatchedClusterPieces) {
@@ -4015,7 +4051,6 @@
// Sometimes photons (or photon fragments) are misidentified as neutral
// hadrons. In those cases, the energy is overestimated. Optionally,
// we can go back and change the ID to photon based on truth info.
- Set<Cluster> extraClustersToTreatAsPhotons = new HashSet<Cluster>();
if (m_cheatOnPhotonsMisidentifiedAsNeutralHadrons) {
System.out.println("WARNING: Cheating on neutral hadron -> photon ID");
for (Cluster clus : neutralClusterCores) {
@@ -4235,11 +4270,13 @@
allUsedHits .addAll(clus.getCalorimeterHits());
allUsedHitsList.addAll(clus.getCalorimeterHits());
}
+ checkForDuplicateHitsInClusters(allUsedCores);
if (allUsedHits.size() != allUsedHitsList.size()) { throw new AssertionError("Mis-counting of hits: "+allUsedHits.size()+" vs "+allUsedHitsList.size()); }
// ... and hits that are shared to them...
Cluster sharedHitClus = makeClusterOfSharedHits(allUsedCores, allSharedClusters, 0.0000001);
allUsedHits .addAll(sharedHitClus.getCalorimeterHits());
allUsedHitsList.addAll(sharedHitClus.getCalorimeterHits());
+ checkForDuplicateHits(allUsedHits);
if (allUsedHits.size() != allUsedHitsList.size()) { throw new AssertionError("Mis-counting of hits: "+allUsedHits.size()+" vs "+allUsedHitsList.size()); }
// Back-convert to ID (to avoid problem with multiple classes of CalorimeterHit
// causing duplicates):
@@ -4984,5 +5021,30 @@
}
}
}
+
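+ // Sanity check: throws AssertionError if any CalorimeterHit object appears in
+ // more than one of the given clusters (or twice within one cluster).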
+ void checkForDuplicateHitsInClusters(Collection<Cluster> clusters) {
+ Set<CalorimeterHit> allUsedHits = new HashSet<CalorimeterHit>();
+ List<CalorimeterHit> allUsedHitsList = new Vector<CalorimeterHit>();
+ for (Cluster clus : clusters) {
+ for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+ allUsedHits.add(hit);
+ allUsedHitsList.add(hit);
+ if (allUsedHits.size() != allUsedHitsList.size()) {
+ throw new AssertionError("ERROR: Duplicate hit with ID "+hit.getCellID());
+ }
+ }
+ }
+ }
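+ // Sanity check: throws AssertionError if the same CalorimeterHit object appears
+ // more than once in the given collection.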
+ void checkForDuplicateHits(Collection<CalorimeterHit> hits) {
+ Set<CalorimeterHit> allUsedHits = new HashSet<CalorimeterHit>();
+ List<CalorimeterHit> allUsedHitsList = new Vector<CalorimeterHit>();
+ for (CalorimeterHit hit : hits) {
+ allUsedHits.add(hit);
+ allUsedHitsList.add(hit);
+ if (allUsedHits.size() != allUsedHitsList.size()) {
+ throw new AssertionError("ERROR: Duplicate hit with ID "+hit.getCellID());
+ }
+ }
+ }
}