lcsim/src/org/lcsim/contrib/uiowa
diff -N ReclusterDTreeDriver.java
--- /dev/null 1 Jan 1970 00:00:00 -0000
+++ ReclusterDTreeDriver.java 17 Dec 2007 22:31:25 -0000 1.1
@@ -0,0 +1,596 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import java.io.IOException;
+import hep.aida.*;
+import hep.physics.vec.*;
+import hep.physics.particle.properties.*;
+import org.lcsim.util.*;
+import org.lcsim.util.hitmap.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+import org.lcsim.recon.cluster.mipfinder.*;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.recon.cluster.clumpfinder.*;
+import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.mc.fast.tracking.ReconTrack;
+import org.lcsim.event.base.*;
+import hep.physics.particle.Particle;
+import org.lcsim.recon.cluster.structural.likelihood.*;
+import org.lcsim.recon.cluster.structural.*;
+
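+/**
+ * Reclusters directed-tree (DTree) clusters into reconstructed particles.
+ * Outline of the algorithm as implemented below:
+ *   1. Decompose each DTree cluster into mips, clumps, and leftover hits.
+ *   2. Match tracks to the resulting components (plus photons and large
+ *      structureless trees), grouping ambiguous matches.
+ *   3. Iteratively grow a shower skeleton per track, adapting each track's
+ *      link-score threshold and E/p tolerance between passes.
+ *   4. Write out charged particles, photons, and neutral hadrons.
+ */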
+public class ReclusterDTreeDriver extends ReclusterDriver {
+
+ protected String m_dTreeClusterListName;
+
+ public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList) {
+ initTrackMatch();
+ initCalibration();
+ initPlots();
+ m_dTreeClusterListName = dTreeClusterList;
+ m_inputTrackList = trackList;
+ m_mcList = mcList;
+ m_debugEoverP = true;
+ m_eval = new LikelihoodEvaluatorWrapper();
+ m_outputParticleListName = "DTreeReclusteredParticles";
+ }
+
+ public void process(EventHeader event) {
+ super.debugProcess(event);
+ m_event = event;
+
+ // Read in
+ List<Cluster> dTreeClusters = event.get(Cluster.class, m_dTreeClusterListName);
+ List<Track> trackList = event.get(Track.class, m_inputTrackList);
+ List<Cluster> photons = event.get(Cluster.class, "PhotonClustersForDTree");
+ List<Cluster> largeClusters = event.get(Cluster.class, "MSTClustersLinkedWithTenOrMoreHits");
+ if (trackList == null) { throw new AssertionError("Null track list!"); }
+ if (trackList.contains(null)) { throw new AssertionError("Track list contains null!"); }
+
+ for (Cluster photon : photons) {
+ if (photon.getCalorimeterHits().contains(null)) {
+ throw new AssertionError("photon contains null hit");
+ }
+ }
+
+ // Look for mips, clumps *within* the DTree clusters
+
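+ // Strategy: run the mip finder on each tree's hits first, remove the mip
+ // hits, then run the clump finder on what remains. Whatever survives both
+ // passes becomes a "leftover hits" cluster, unless the tree showed no
+ // structure at all, in which case it is kept as a single block.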
+ // Here are the clusterers:
+ Clusterer mipfinder = new TrackClusterDriver();
+ Clusterer clumpfinder = new ClumpFinder();
+ // Lists of subclusters
+ List<Cluster> mips = new Vector<Cluster>();
+ List<Cluster> clumps = new Vector<Cluster>();
+ List<Cluster> leftoverHitClusters = new Vector<Cluster>();
+ // Make two-way maps (from tree to contents, and from subclusters to tree)
+ Map<Cluster, List<Cluster>> mipsInTrees = new HashMap<Cluster, List<Cluster>>();
+ Map<Cluster, List<Cluster>> clumpsInTrees = new HashMap<Cluster, List<Cluster>>();
+ Map<Cluster, List<CalorimeterHit>> otherHitsInTrees = new HashMap<Cluster, List<CalorimeterHit>>();
+ Map<Cluster, Cluster> treeOfMip = new HashMap<Cluster, Cluster>();
+ Map<Cluster, Cluster> treeOfClump = new HashMap<Cluster,Cluster>();
+ Map<Cluster, Cluster> treeOfLeftoverHits = new HashMap<Cluster,Cluster>();
+ List<Cluster> treesWithNoStructure = new Vector<Cluster>();
+ int minHitsToBeTreatedAsCluster = 5;
+ for (Cluster dTreeCluster : dTreeClusters) {
+ // Prepare hitmap, initially holding all the hits in the cluster
+ Map<Long, CalorimeterHit> hitsInTree = new HashMap<Long, CalorimeterHit>();
+ for (CalorimeterHit hit : dTreeCluster.getCalorimeterHits()) {
+ hitsInTree.put(hit.getCellID(), hit);
+ }
+ if (hitsInTree.values().contains(null)) { throw new AssertionError("null hit in hitsInTree.values()"); }
+ // Make mips
+ List<Cluster> mipClusters = mipfinder.createClusters(hitsInTree);
+ mipsInTrees.put(dTreeCluster, mipClusters);
+ mips.addAll(mipClusters);
+ // Remove mip hits from the hitmap so they're not used twice
+ for (Cluster mip : mipClusters) {
+ if (mip.getCalorimeterHits().size() == 0) { throw new AssertionError("mip has no hits"); }
+ if (mip.getCalorimeterHits().contains(null)) { throw new AssertionError("null hit in mip"); }
+ treeOfMip.put(mip, dTreeCluster);
+ for (CalorimeterHit hit : mip.getCalorimeterHits()) {
+ hitsInTree.remove(hit.getCellID());
+ }
+ }
+ // Make clumps
+ List<Cluster> clumpClusters = clumpfinder.createClusters(hitsInTree);
+ clumpsInTrees.put(dTreeCluster, clumpClusters);
+ clumps.addAll(clumpClusters);
+ // Remove clump hits from the hitmap so they're not used twice
+ for (Cluster clump : clumpClusters) {
+ if (clump.getCalorimeterHits().size() == 0) { throw new AssertionError("clump has no hits"); }
+ if (clump.getCalorimeterHits().contains(null)) { throw new AssertionError("null hit in clump"); }
+ treeOfClump.put(clump, dTreeCluster);
+ for (CalorimeterHit hit : clump.getCalorimeterHits()) {
+ hitsInTree.remove(hit.getCellID());
+ }
+ }
+ if (mipClusters.size()==0 && clumpClusters.size()==0 && hitsInTree.size()>=minHitsToBeTreatedAsCluster) {
+ // No structure found in tree. Treat it as a block.
+ treesWithNoStructure.add(dTreeCluster);
+ } else {
+ // Structure found in tree. Rest are leftover hits:
+ if (hitsInTree.size() > 0) {
+ List<CalorimeterHit> leftoverHits = new Vector<CalorimeterHit>(hitsInTree.values());
+ if (leftoverHits.contains(null)) { throw new AssertionError("null hit in leftoverHits"); }
+ otherHitsInTrees.put(dTreeCluster, leftoverHits);
+ BasicCluster leftoverHitCluster = new BasicCluster();
+ for (CalorimeterHit hit : leftoverHits) {
+ leftoverHitCluster.addHit(hit);
+ }
+ leftoverHitClusters.add(leftoverHitCluster);
+ treeOfLeftoverHits.put(leftoverHitCluster, dTreeCluster);
+ }
+ }
+ }
+ Map<Cluster, Cluster> treeOfComponent = new HashMap<Cluster,Cluster>();
+ treeOfComponent.putAll(treeOfMip);
+ treeOfComponent.putAll(treeOfClump);
+ treeOfComponent.putAll(treeOfLeftoverHits);
+
+ System.out.println("Found "+mips.size()+" mips, "+clumps.size()+" clumps, "+photons.size()+" photons, "+leftoverHitClusters.size()+" leftover-hit-clusters, "+treesWithNoStructure.size()+" large DTrees with no structure, and "+trackList.size()+" tracks in event.");
+
+ // Match tracks
+ List<Cluster> allMatchableClusters = new Vector<Cluster>();
+ allMatchableClusters.addAll(mips);
+ allMatchableClusters.addAll(clumps);
+ allMatchableClusters.addAll(photons);
+ allMatchableClusters.addAll(leftoverHitClusters);
+ allMatchableClusters.addAll(treesWithNoStructure);
+ if (allMatchableClusters.contains(null)) { throw new AssertionError("Book-keeping error: null cluster in list allMatchableClusters"); }
+ Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
+ Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
+ System.out.println("Attempting to match "+allMatchableClusters.size()+" matchable clusters to "+trackList.size()+" tracks");
+ for (Track tr : trackList) {
+ Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, allMatchableClusters);
+ if (matchedCluster != null) {
+ tracksMatchedToClusters.put(tr, matchedCluster);
+ List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
+ if (clusTrList == null) {
+ clusTrList = new Vector<Track>();
+ clustersMatchedToTracks.put(matchedCluster, clusTrList);
+ }
+ clusTrList.add(tr);
+ }
+ }
+
+ // Unmatched tracks
+ List<Track> unmatchedTracks = new Vector<Track>();
+ unmatchedTracks.addAll(trackList);
+ unmatchedTracks.removeAll(tracksMatchedToClusters.keySet());
+ System.out.println("DEBUG: "+unmatchedTracks.size()+" unmatched tracks remaining:");
+ for (Track tr : unmatchedTracks) {
+ System.out.println(" track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude());
+ }
+
+ // Figure out whether tracks were uniquely matched or not:
+ Set<Track> uniquelyMatchedTracks = new HashSet<Track>();
+ Set<Track> ambiguouslyMatchedTracks = new HashSet<Track>();
+ List<Set<Track>> ambiguouslyMatchedTrackGroups = new Vector<Set<Track>>();
+ for (Cluster clus : clustersMatchedToTracks.keySet()) {
+ List<Track> tracksMatchedToThisCluster = clustersMatchedToTracks.get(clus);
+ if (tracksMatchedToThisCluster.size() > 1) {
+ ambiguouslyMatchedTracks.addAll(tracksMatchedToThisCluster);
+ Set<Track> newSet = new HashSet<Track>();
+ newSet.addAll(tracksMatchedToThisCluster);
+ ambiguouslyMatchedTrackGroups.add(newSet);
+ } else {
+ uniquelyMatchedTracks.addAll(tracksMatchedToThisCluster);
+ }
+ }
+ if (uniquelyMatchedTracks.size() + ambiguouslyMatchedTracks.size() != tracksMatchedToClusters.keySet().size()) { throw new AssertionError("Book-keeping error: "+uniquelyMatchedTracks.size()+" + "+ambiguouslyMatchedTracks.size()+" != "+tracksMatchedToClusters.keySet().size()); }
+ // Redo ambiguous tracks hackishly:
+ // Similarly, redo track -> cluster map to use tweaked tracks
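+ // NOTE: MultipleTrackTrack is a helper that stands in for a whole group of
+ // tracks matched to the same cluster, so downstream code can treat the
+ // group as a single track (assumed to aggregate the momenta of its members).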
+ Map<Track, Cluster> tweakedTracksMatchedToClusters = new HashMap<Track,Cluster>();
+ List<Track> tweakedTracks = new Vector<Track>();
+ tweakedTracks.addAll(uniquelyMatchedTracks);
+ for (Set<Track> trackSet : ambiguouslyMatchedTrackGroups) {
+ MultipleTrackTrack tweakedTrack = new MultipleTrackTrack(trackSet);
+ tweakedTracks.add(tweakedTrack);
+ for (Track origTrack : trackSet) {
+ Cluster clus = tracksMatchedToClusters.get(origTrack);
+ tweakedTracksMatchedToClusters.put(tweakedTrack, clus); // over-writes sometimes, but that's OK.
+ }
+ }
+ for (Track tr : uniquelyMatchedTracks) {
+ Cluster clus = tracksMatchedToClusters.get(tr);
+ tweakedTracksMatchedToClusters.put(tr, clus);
+ }
+
+ // Track seeds
+ Set<Cluster> seeds = clustersMatchedToTracks.keySet();
+ List<Cluster> seedLeftoverHitClusters = new Vector<Cluster>();
+ List<Cluster> nonSeedLeftoverHitClusters = new Vector<Cluster>();
+ for (Cluster clus : leftoverHitClusters) {
+ if (seeds.contains(clus) ) {
+ seedLeftoverHitClusters.add(clus);
+ } else {
+ nonSeedLeftoverHitClusters.add(clus);
+ }
+ }
+ if ( seedLeftoverHitClusters.size() + nonSeedLeftoverHitClusters.size() != leftoverHitClusters.size() ) { throw new AssertionError("Book-keeping error"); }
+ List<Cluster> seedPhotonClusters = new Vector<Cluster>();
+ List<Cluster> nonSeedPhotonClusters = new Vector<Cluster>();
+ for (Cluster clus : photons) {
+ if (seeds.contains(clus)) {
+ seedPhotonClusters.add(clus);
+ } else {
+ nonSeedPhotonClusters.add(clus);
+ }
+ }
+ if (seedPhotonClusters.size() + nonSeedPhotonClusters.size() != photons.size() ) { throw new AssertionError("Book-keeping error"); }
+
+ // Sort matched tracks by momentum
+ List<Track> tracksSortedByMomentum = new Vector<Track>();
+ for (Track tr : tweakedTracksMatchedToClusters.keySet()) {
+ tracksSortedByMomentum.add(tr);
+ }
+ Collections.sort(tracksSortedByMomentum, new MomentumSort());
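+ // Skeleton building below walks tracks in this order, so tracks at one end
+ // of the momentum spectrum (presumably the high end, per MomentumSort) get
+ // first claim on contested clusters.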
+
+ // Prep for linking
+ List<Cluster> linkableClusters = new Vector<Cluster>();
+ List<Cluster> smallClustersToShare = new Vector<Cluster>();
+ List<Cluster> leftoverHitClustersToShare = new Vector<Cluster>();
+ linkableClusters.addAll(mips);
+ linkableClusters.addAll(photons);
+ linkableClusters.addAll(clumps);
+ linkableClusters.addAll(treesWithNoStructure);
+ for (Cluster clus : leftoverHitClusters) {
+ if (seeds.contains(clus)) {
+ linkableClusters.add(clus);
+ } else if (clus.getCalorimeterHits().size() < 3) {
+ smallClustersToShare.add(clus);
+ } else {
+ leftoverHitClustersToShare.add(clus);
+ }
+ }
+
+ // Initially, cheat
+ resetPotentialLinks();
+ if (m_cheatOnScoring) {
+ initPotentialLinks_cheating(linkableClusters, clustersMatchedToTracks);
+ } else {
+ double scaleFactorTrackToTrack = 1.0;
+ double scaleFactorTrackToClump = 1.0;
+ double scaleFactorTrackToSmallSeed = 1.0;
+ double thresholdForProximity = 50.0; // 5cm. FIXME: Don't hard-code.
+ double thresholdForProximityClump = 75.0; // FIXME: Don't hard-code.
+ initPotentialLinks_SkeletonsWithinLargeClus(largeClusters, mips, clumps, scaleFactorTrackToTrack, scaleFactorTrackToClump);
+ initPotentialLinks_MipMip(mips, scaleFactorTrackToTrack, true);
+ initPotentialLinks_MipClump(mips, clumps, scaleFactorTrackToClump, true);
+ initPotentialLinks_MipMisc(mips, seedLeftoverHitClusters, thresholdForProximity, "SmallSeed");
+ initPotentialLinks_MipMisc(mips, seedPhotonClusters, thresholdForProximity, "Photon");
+ initPotentialLinks_MiscMisc(seedLeftoverHitClusters, clumps, thresholdForProximity, "SmallSeed", "Clump");
+ initPotentialLinks_MiscMisc(seedLeftoverHitClusters, nonSeedPhotonClusters, thresholdForProximity, "SmallSeed", "Photon");
+ initPotentialLinks_MiscSelf(clumps, thresholdForProximityClump, "Clump", true);
+
+ initPotentialLinks_MipMisc(mips, treesWithNoStructure, thresholdForProximity, "LargeStructurelessTree");
+ initPotentialLinks_MiscMisc(seedLeftoverHitClusters, treesWithNoStructure, thresholdForProximity, "SmallSeed", "LargeStructurelessTree");
+ initPotentialLinks_MiscMisc(clumps, treesWithNoStructure, thresholdForProximity, "Clump", "LargeStructurelessTree");
+ initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
+ }
+
+ // Done making links. Prep & build skeletons:
+ sortLinks();
+
+ // Try to build skeletons, working with tracks in momentum-sorted order...
+ Map<Track,Double> newMapTrackToThreshold = new HashMap<Track,Double>(); // for geometrical checks
+ Map<Track,Double> newMapTrackToTolerance = new HashMap<Track,Double>(); // for E/p checks
+ for (Track tr : tracksSortedByMomentum) {
+ newMapTrackToThreshold.put(tr, new Double(0.7));
+ newMapTrackToTolerance.put(tr, new Double(1.0));
+ }
+ Map<Cluster, Track> newMapShowerComponentToTrack = null;
+ Map<Track, Set<Cluster>> newMapTrackToShowerComponents = null;
+ Map<Track, Set<Cluster>> newMapTrackToVetoedAdditions = null;
+
+ // Set up sharing
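+ // Clusters too small or too ambiguous to link exclusively are "shared":
+ // each is assigned fractional weights toward nearby linkable clusters.
+ // Small leftover clusters (<3 hits) use a pure proximity weighting; larger
+ // leftover clusters use the DTree-aware weighting defined at the bottom of
+ // this file.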
+ List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
+ ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
+ SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAlgForSmallClusters);
+ sharedSmallDTreeClusters.createShares(linkableClusters);
+ sharedSmallDTreeClusters.rebuildHints();
+ allSharedClusters.add(sharedSmallDTreeClusters);
+ DTreeClusterSharingAlgorithm dTreeSharingAlg = new DTreeClusterSharingAlgorithm(treeOfComponent, 20.0, 150.0);
+ SharedClusterGroup sharedLeftoverHitClusters = new SharedClusterGroup(leftoverHitClustersToShare, dTreeSharingAlg);
+ sharedLeftoverHitClusters.createShares(linkableClusters);
+ sharedLeftoverHitClusters.rebuildHints();
+ allSharedClusters.add(sharedLeftoverHitClusters);
+
+ // Iterate to build clusters:
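+ // Adaptive scheme: after each pass, loosen the E/p tolerance for tracks
+ // whose vetoed link targets ended up unused by any shower, and lower the
+ // link-score threshold for tracks whose shower energy is more than one
+ // sigma below the track momentum. Stop early once a pass changes nothing.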
+ for (int iIter=0; iIter<10; iIter++) {
+ newMapShowerComponentToTrack = new HashMap<Cluster, Track>();
+ newMapTrackToShowerComponents = new HashMap<Track, Set<Cluster>>();
+ newMapTrackToVetoedAdditions = new HashMap<Track, Set<Cluster>>();
+ for (Track tr : tracksSortedByMomentum) {
+ Set<Cluster> vetoedAdditions = new HashSet<Cluster>();
+ Set<Cluster> showerComponents = iterateOnOneTrack(tr, tweakedTracksMatchedToClusters, allSharedClusters, newMapTrackToThreshold.get(tr), newMapTrackToTolerance.get(tr), newMapShowerComponentToTrack, vetoedAdditions);
+ newMapTrackToShowerComponents.put(tr, showerComponents);
+ for (Cluster clus : showerComponents) {
+ newMapShowerComponentToTrack.put(clus, tr);
+ }
+ newMapTrackToVetoedAdditions.put(tr, vetoedAdditions);
+ }
+
+ // Look for tracks which
+ // * Have E/p significantly below 1, and/or
+ // * Could be linked to a cluster but were vetoed, PROVIDED THAT the cluster was not put in any other shower
+ Set<Track> tracksWithEoverPTooLow = new HashSet<Track>();
+ Set<Track> tracksWithVetoedLinkToUnusedCluster = new HashSet<Track>();
+
+ for (Track tr : tracksSortedByMomentum) {
+ Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+ if (showerComponents == null || showerComponents.size()==0) {
+ System.out.println("ERROR: Every track should have at least one shower component (its seed)");
+ System.out.println("ERROR: But track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" does not.");
+ if (uniquelyMatchedTracks.contains(tr)) {
+ System.out.println("ERROR: uniquelyMatchedTracks contains track");
+ } else if (ambiguouslyMatchedTracks.contains(tr)) {
+ System.out.println("ERROR: ambiguouslyMatchedTracks contains track");
+ } else {
+ System.out.println("ERROR: Neither uniquelyMatchedTracks nor ambiguouslyMatchedTracks contains track");
+ }
+ throw new AssertionError("ERROR: Failed to find a cluster for track");
+ }
+ double clusterEnergy = energy(showerComponents);
+ double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
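+ // Approximate hadronic energy resolution, sigma_E ~ 0.7*sqrt(E) (hard-coded).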
+ double sigma = 0.7 * Math.sqrt(trackMomentum);
+ double normalizedResidual = (clusterEnergy-trackMomentum)/sigma;
+ if (normalizedResidual < -1.0) {
+ // FIXME: This cut-off shouldn't be hard-coded
+ tracksWithEoverPTooLow.add(tr);
+ }
+ }
+ for (Cluster clus : linkableClusters) {
+ if (newMapShowerComponentToTrack.get(clus) == null) {
+ for (Track trForVeto : tracksSortedByMomentum) {
+ if (newMapTrackToVetoedAdditions.get(trForVeto).contains(clus)) {
+ tracksWithVetoedLinkToUnusedCluster.add(trForVeto);
+ }
+ }
+ }
+ }
+
+ // Adjust tolerance (how wrong in E/p we'll allow a cluster addition to make us)
+ // and threshold (how low in score we'll go for adding a link)
+ double maximumAllowedTolerance = 2.5;
+ double minimumAllowedThreshold = 0.3;
+ boolean stateChange = false;
+
+ for (Track tr : tracksWithVetoedLinkToUnusedCluster) {
+ Double oldTolerance = newMapTrackToTolerance.get(tr);
+ double newTolerance = oldTolerance.doubleValue() + 0.25;
+ if (newTolerance <= maximumAllowedTolerance) {
+ newMapTrackToTolerance.put(tr, newTolerance);
+ stateChange = true;
+ }
+ }
+ for (Track tr : tracksWithEoverPTooLow) {
+ Double oldThreshold = newMapTrackToThreshold.get(tr);
+ double newThreshold = oldThreshold.doubleValue() - 0.05;
+ if (newThreshold >= minimumAllowedThreshold) {
+ newMapTrackToThreshold.put(tr, new Double(newThreshold));
+ stateChange = true;
+ }
+ }
+
+ if (!stateChange) {
+ // Reached steady state => stop
+ break;
+ }
+ }
+
+ List<Cluster> tmp = new Vector<Cluster>();
+ printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, tmp, tmp, linkableClusters, tmp, tmp, newMapTrackToVetoedAdditions);
+
+ // Outputs
+
+ // OK, time to write things out!
+ List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+ List<ReconstructedParticle> outputChargedParticleList = new Vector<ReconstructedParticle>();
+ List<ReconstructedParticle> outputPhotonParticleList = new Vector<ReconstructedParticle>();
+ List<ReconstructedParticle> outputNeutralHadronParticleList = new Vector<ReconstructedParticle>();
+
+ // PID info
+ int pdg_piplus = 211;
+ int pdg_piminus = -211;
+ ParticleType type_piplus = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_piplus);
+ ParticleType type_piminus = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_piminus);
+ BaseParticleID pid_piplus = new BaseParticleID(type_piplus);
+ BaseParticleID pid_piminus = new BaseParticleID(type_piminus);
+ double mass_piplus = 0.1396;
+ int pdg_photon = 22;
+ ParticleType type_photon = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_photon);
+ BaseParticleID pid_photon = new BaseParticleID(type_photon);
+ int pdg_K0 = 130;
+ ParticleType type_K0 = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_K0);
+ BaseParticleID pid_K0 = new BaseParticleID(type_K0);
+ double mass_K0 = type_K0.getMass();
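+ // NOTE: every charged particle below is written with the pi+ hypothesis;
+ // pid_piminus is defined but never applied. pdg_K0 = 130 is the K0_L code,
+ // used here as the generic neutral-hadron hypothesis.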
+
+ // Charged tracks
+ for (Track tr : tracksSortedByMomentum) {
+ Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+ // write out charged shower
+ BaseReconstructedParticle part = new BaseReconstructedParticle();
+ for (Cluster clus : showerComponents) { part.addCluster(clus); }
+ Cluster sharedHitClus = makeClusterOfSharedHits(showerComponents, allSharedClusters);
+ if (sharedHitClus.getCalorimeterHits().size()>0) { part.addCluster(sharedHitClus); }
+ part.addTrack(tr);
+ part.setCharge(tr.getCharge());
+ Hep3Vector trackMomentum = new BasicHep3Vector(tr.getMomentum());
+ if (tr instanceof BaseTrackMC) {
+ // FIXME: Also do this for MultipleTrackTracks
+ trackMomentum = ((BaseTrackMC)(tr)).getMCParticle().getMomentum();
+ }
+ double trackMomentumMag = trackMomentum.magnitude();
+ double trackEnergySq = trackMomentumMag*trackMomentumMag + mass_piplus*mass_piplus;
+ HepLorentzVector fourMomentum = new BasicHepLorentzVector(Math.sqrt(trackEnergySq), trackMomentum);
+ part.set4Vector(fourMomentum);
+ part.setMass(mass_piplus);
+ part.setReferencePoint(new BasicHep3Vector(tr.getReferencePoint()));
+ part.addParticleID(pid_piplus);
+ part.setParticleIdUsed(pid_piplus);
+ outputParticleList.add(part);
+ outputChargedParticleList.add(part);
+ }
+
+ // Photons (...)
+ for (Cluster clus : photons) {
+ Track tr = newMapShowerComponentToTrack.get(clus);
+ if (tr == null) {
+ // write out photon
+ BaseReconstructedParticle part = new BaseReconstructedParticle();
+ part.addCluster(clus);
+ // Add fuzzy hits
+ Cluster sharedHitClus = makeClusterOfSharedHits(clus, allSharedClusters);
+ if (sharedHitClus.getCalorimeterHits().size()>0) { part.addCluster(sharedHitClus); }
+ double clusterEnergy = energy(clus, allSharedClusters, m_photonCalib);
+ Hep3Vector pos = new BasicHep3Vector(clus.getPosition());
+ Hep3Vector threeMomentum = VecOp.mult(clusterEnergy, VecOp.unit(pos)); // assume it comes from the IP
+ HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
+ part.set4Vector(fourMomentum);
+ part.setReferencePoint(0,0,0);
+ part.setCharge(0);
+ // Set the PID and mass
+ part.addParticleID(pid_photon);
+ part.setParticleIdUsed(pid_photon);
+ part.setMass(0.0);
+ // Add to the output list
+ outputParticleList.add(part);
+ outputPhotonParticleList.add(part);
+ }
+ }
+
+ // NHad
+
+ List<Cluster> unmatchedClusterPieces = new Vector<Cluster>();
+ for (Cluster clus : linkableClusters) {
+ Track tr = newMapShowerComponentToTrack.get(clus);
+ if (tr == null) {
+ if (!photons.contains(clus)) {
+ unmatchedClusterPieces.add(clus);
+ }
+ }
+ }
+
+ List<Cluster> neutralClusterCores = new Vector<Cluster>();
+ Set<Cluster> unusedUnmatchedClusterPieces = new HashSet<Cluster>();
+ unusedUnmatchedClusterPieces.addAll(unmatchedClusterPieces);
+ double threshold = 0.7;
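+ // Grow each neutral core breadth-first: start from an unused piece, then
+ // repeatedly absorb any still-unused piece reachable through a potential
+ // link whose score is at or above threshold, one tier at a time.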
+ for (Cluster clus : unmatchedClusterPieces) {
+ if (unusedUnmatchedClusterPieces.contains(clus)) {
+ // Try to build a cluster...
+ Set<Cluster> piecesForThisCluster = new HashSet<Cluster>();
+ piecesForThisCluster.add(clus);
+ unusedUnmatchedClusterPieces.remove(clus);
+ List<Cluster> componentsInPreviousTier = new Vector<Cluster>();
+ componentsInPreviousTier.add(clus);
+ for (int iTier=0; iTier<25; iTier++) { // 25 is arbitrary cap in case we get stuck.
+ List<Cluster> componentsInNextTier = new Vector<Cluster>();
+ for (Cluster miniSeed : componentsInPreviousTier) {
+ // Search for links above threshold
+ List<ScoredLink> potentialLinks = m_potentialLinks.get(miniSeed);
+ if (potentialLinks != null) {
+ for (ScoredLink link : potentialLinks) {
+ if (link.score() < threshold) { break; }
+ Cluster newClus = link.counterpart(miniSeed);
+ if (newClus == null) { throw new AssertionError("Null link!"); }
+ // Ensure that we haven't already added component
+ if (unusedUnmatchedClusterPieces.contains(newClus)) {
+ componentsInNextTier.add(newClus);
+ unusedUnmatchedClusterPieces.remove(newClus);
+ piecesForThisCluster.add(newClus);
+ }
+ }
+ }
+ }
+ componentsInPreviousTier = componentsInNextTier;
+ if (componentsInNextTier.size()==0) { break; }
+ }
+ Cluster newShower = makeCombinedCluster(piecesForThisCluster);
+ neutralClusterCores.add(newShower);
+ }
+ }
+
+ // Make reconstructed particles from the neutral hadron showers:
+
+ for (Cluster clus : neutralClusterCores) {
+ // write out neutral hadron
+ BaseReconstructedParticle part = new BaseReconstructedParticle();
+ part.addCluster(clus);
+ // Add fuzzy hits
+ Cluster sharedHitClus = makeClusterOfSharedHits(clus, allSharedClusters);
+ if (sharedHitClus.getCalorimeterHits().size()>0) { part.addCluster(sharedHitClus); }
+ double clusterEnergy = energy(clus, allSharedClusters, m_neutralCalib); // CHECK: Is this OK?
+ double clusterMomentumMagSq = clusterEnergy*clusterEnergy - mass_K0*mass_K0;
+ if (clusterMomentumMagSq<0.0) { clusterMomentumMagSq = 0.0; }
+ Hep3Vector pos = new BasicHep3Vector(clus.getPosition());
+ Hep3Vector threeMomentum = VecOp.mult(Math.sqrt(clusterMomentumMagSq), VecOp.unit(pos)); // assume it comes from the IP
+ HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
+ part.set4Vector(fourMomentum);
+ part.setReferencePoint(0,0,0);
+ part.setCharge(0);
+ // Set the PID and mass
+ part.addParticleID(pid_K0);
+ part.setParticleIdUsed(pid_K0);
+ part.setMass(mass_K0);
+ // Add to the output list
+ outputParticleList.add(part);
+ outputNeutralHadronParticleList.add(part);
+ }
+
+ // Write out
+ m_event.put(m_outputParticleListName, outputParticleList);
+ m_event = null;
+ }
+
+ // In this weighting scheme
+ // * Components not in the same DTree cluster get a significant penalty factor
+ // * Components in the same DTree cluster get a small additive bonus (to take the minimum above zero)
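+ // The proximity weight is min(1, (minimumDistance/d)^3). Targets from the
+ // same tree then get the +0.05 additive bonus with no distance cutoff;
+ // targets from a different tree (or from none, e.g. photons) get a x0.2
+ // penalty and must lie within maximumDistance.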
+ protected class DTreeClusterSharingAlgorithm implements ClusterSharingAlgorithm {
+ double m_minimumDistance;
+ double m_maximumDistance;
+ Map<Cluster,Cluster> m_treeOfComponent;
+ // Constructor supplies a list of DTree clusters
+ public DTreeClusterSharingAlgorithm(Map<Cluster,Cluster> treeOfComponent, double minimumDistance, double maximumDistance) {
+ m_treeOfComponent = treeOfComponent;
+ m_minimumDistance = minimumDistance;
+ m_maximumDistance = maximumDistance;
+ }
+ public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
+ Cluster parentDTreeCluster = m_treeOfComponent.get(clusterToShare);
+ if (parentDTreeCluster == null) { throw new AssertionError("ERROR: DTreeClusterSharingAlgorithm was passed a cluster that's not part of a DTree."); }
+ Map<Cluster,Double> outputMap = new HashMap<Cluster,Double>();
+ for (Cluster target : clusterTargets) {
+ double distance = proximity(clusterToShare, target);
+ if (distance == 0.0) { throw new AssertionError("ERROR: Distance is zero... configuration error"); }
+ double scaledDistance = distance / m_minimumDistance;
+ double weight = 1.0 / (scaledDistance*scaledDistance*scaledDistance);
+ if (weight > 1.0) {
+ // Don't go above 1 based on proximity
+ weight = 1.0;
+ }
+ // Now, is the target part of the same DTreeCluster?
+ // This may return null if the target is something that doesn't come from
+ // a DTreeCluster, e.g. a photon
+ Cluster parentDTreeClusterOfTarget = m_treeOfComponent.get(target);
+ if (parentDTreeClusterOfTarget == parentDTreeCluster) {
+ // From same DTreeCluster => apply additive bonus and don't impose a distance cutoff
+ weight += 0.05;
+ outputMap.put(target, new Double(weight));
+ } else {
+ // Not from same DTreeCluster => apply penalty factor and impose a distance cutoff
+ if (distance < m_maximumDistance) {
+ weight *= 0.2;
+ outputMap.put(target, new Double(weight));
+ }
+ }
+ }
+ return outputMap;
+ }
+ }
+}
\ No newline at end of file