10 modified files
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.2 -r1.3
--- ClumpFinder.java 19 Oct 2005 19:44:21 -0000 1.2
+++ ClumpFinder.java 16 Dec 2005 21:12:12 -0000 1.3
@@ -26,7 +26,7 @@
* or more for each hit. This is hard-coded at the moment, but
* should become user-definable in a later version.
*
- * @version $Id: ClumpFinder.java,v 1.2 2005/10/19 19:44:21 mcharles Exp $
+ * @version $Id: ClumpFinder.java,v 1.3 2005/12/16 21:12:12 mcharles Exp $
*/
public class ClumpFinder extends Driver
@@ -67,13 +67,9 @@
// The output is a map from a cluster (the big cluster) to a list of clusters (the clumps in that big cluster)
MapClusterToListOfClusters outputMap = new MapClusterToListOfClusters();
// We also use a map from clusters to track segments:
- List<MapClusterToListOfClusters> inputTrackMapList = event.get(MapClusterToListOfClusters.class, m_trackMapName);
- MapClusterToListOfClusters inputTrackMap = null;
- if (inputTrackMapList != null) {
- if (inputTrackMapList.iterator().hasNext()) {
- inputTrackMap = inputTrackMapList.iterator().next();
- }
- }
+ List<Object> dummyInputList = event.get(Object.class, m_trackMapName);
+ Object dummyInputObj = dummyInputList.iterator().next();
+ Map<Cluster, List<Cluster>> inputTrackMap = (Map<Cluster, List<Cluster>>) (dummyInputObj);
for (Cluster bigCluster : inputList) {
// Handle each big cluster. Here is the list of clumps:
@@ -112,9 +108,9 @@
outputMap.put(bigCluster, clumps);
}
// Dummy for writeout
- List<MapClusterToListOfClusters> dummyList = new Vector<MapClusterToListOfClusters> ();
- dummyList.add(outputMap);
- event.put(m_clumpMapName, dummyList);
+ List<MapClusterToListOfClusters> dummyOutputList = new Vector<MapClusterToListOfClusters> ();
+ dummyOutputList.add(outputMap);
+ event.put(m_clumpMapName, dummyOutputList);
}
/**
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.2 -r1.3
--- ClusterAssociator.java 14 Oct 2005 17:53:55 -0000 1.2
+++ ClusterAssociator.java 16 Dec 2005 21:12:13 -0000 1.3
@@ -2,8 +2,10 @@
import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
+import org.lcsim.event.MCParticle;
public interface ClusterAssociator
{
+ public boolean isLinkCorrect(Cluster clus1, MCParticle part, EventHeader event);
public boolean isLinkCorrect(Cluster clus1, Cluster clus2, EventHeader event);
}
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.1 -r1.2
--- ClusterEnergyAssociator.java 14 Oct 2005 17:54:51 -0000 1.1
+++ ClusterEnergyAssociator.java 16 Dec 2005 21:12:13 -0000 1.2
@@ -36,4 +36,27 @@
// Failure
return false;
}
+
+ public boolean isLinkCorrect(Cluster clus1, MCParticle part, EventHeader event)
+ {
+ if (event != m_cacheEvent) {
+ m_cacheEvent = event;
+ m_cacheAssociator = new EnergyAssociator(event);
+ }
+
+ if (part != null) {
+ List<MCParticle> mc1 = m_cacheAssociator.associateClusterToMCParticles(clus1);
+ if (mc1 != null) {
+ if (mc1.size()>0) {
+ MCParticle dominant1 = mc1.iterator().next();
+ if (dominant1 == part) {
+ // Match!
+ return true;
+ }
+ }
+ }
+ }
+ // Failure
+ return false;
+ }
}
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.5 -r1.6
--- ExamplePFA.java 19 Oct 2005 19:43:09 -0000 1.5
+++ ExamplePFA.java 16 Dec 2005 21:12:13 -0000 1.6
@@ -3,8 +3,12 @@
import java.util.List;
import java.util.Vector;
+import hep.physics.vec.*;
+
import org.lcsim.event.EventHeader;
import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.MCParticle;
import structural.likelihood.*;
import mst.MSTClusterDriver;
@@ -15,7 +19,7 @@
/**
* An example PFA using the structural algorithm.
*
- * @version $Id: ExamplePFA.java,v 1.5 2005/10/19 19:43:09 mcharles Exp $
+ * @version $Id: ExamplePFA.java,v 1.6 2005/12/16 21:12:13 mcharles Exp $
*/
public class ExamplePFA extends Driver
@@ -26,6 +30,30 @@
public ExamplePFA(boolean writeLikelihood)
{
+ // Step 1 (here):
+ // For each MC particle, make a helix swimmer.
+ // Requires B field.
+ // Output: Helix swimmer and MC particle (linked)
+ // Step 2:
+ // For each helix, extrapolate it to the calorimeter
+ // surfaces (barrel and endcap).
+ // Requires geometry information.
+ // Output: Position and momentum for each particle/swimmer
+ // Step 3:
+ // For each helix that reaches an ECAL, look for
+ // a MIP stub or cluster nearby.
+ // Output: Bi-directional maps from MCParticle/swimmer to cluster
+ // Step 4:
+ // (a) Handle MCParticles which weren't mapped properly
+ // (b) Look at E/P for MCParticles which were mapped.
+ //
+ String nameOfHelixList = new String("HelixList");
+ String nameOfExtrapolationInfoList = new String("HelixExtrapolatedToECALList");
+ String nameOfHelixToClusterMap = new String("HelixToClusterMap");
+ String nameOfClusterToHelixMap = new String("ClusterToHelixMap");
+ add(new MakeHelixSwimmersFromTruth(nameOfHelixList)); // step 1 and 2
+ add(new SwimToECAL(nameOfHelixList, nameOfExtrapolationInfoList)); // step 2
+
// Begin with a big-scale cluster set, made with the MST:
Metrics geomDist = new GeometricalDistance();
Metrics hitHitDist = new MinimumHitToHitDistance();
@@ -49,6 +77,9 @@
add(findTracksEcal);
add(findTracksHcal);
+ // Try to link the tracks from the tracking system to MIP segments and/or clusters:
+ add(new MatchHelixToCluster(nameOfExtrapolationInfoList, "Track segments EMCal", "MSTCluster EMCal", nameOfHelixToClusterMap, nameOfClusterToHelixMap));
+
// Now, link them together across the ECAL-HCAL boundary:
MSTClusterDriver mstDriverLink = new MSTClusterDriver("User");
mstDriverLink.registerMetrics(hitHitDist);
@@ -59,9 +90,16 @@
add(mstDriverLink);
// Special thing: Map MIPs to clusters correctly after this cluster linking...
- Remapper remapTracks = new Remapper("MSTCluster linked", "Track segments linked");
- remapTracks.addInputClusters("MSTCluster EMCal", "Track segments EMCal");
- remapTracks.addInputClusters("MSTCluster HCal", "Track segments HCal");
+ Remapper<Cluster> remapMIPs = new Remapper<Cluster>("MSTCluster linked", "Track segments linked");
+ remapMIPs.addInputClusters("MSTCluster EMCal", "Track segments EMCal");
+ remapMIPs.addInputClusters("MSTCluster HCal", "Track segments HCal");
+ remapMIPs.setDebug(true);
+ add(remapMIPs);
+ String nameOfClusterToHelixMapLinked = new String("ClusterToHelixMapLinked");
+ Remapper<TrackExtrapolationInfo> remapTracks = new Remapper<TrackExtrapolationInfo>("MSTCluster linked", nameOfClusterToHelixMapLinked);
+ remapTracks.addInputClusters("MSTCluster EMCal", nameOfClusterToHelixMap);
+ remapTracks.addInputClusters("MSTCluster HCal", nameOfClusterToHelixMap);
+ remapTracks.setDebug(true);
add(remapTracks);
// Find clumps within clusters
@@ -75,15 +113,15 @@
// Obtain and write likelihood histograms
System.out.println("ExamplePFA: I will obtain and write out likelihood histograms.");
LikelihoodEvaluator eval = new LikelihoodEvaluator();
- eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackDOCA(), 10, 0.0, 100.0, false, true);
+ eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackDOCA(), 50, 0.0, 100.0, false, true);
eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackPOCAInCalorimeter(), 2, -0.5, 1.5, false, false);
- eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackSmallestDistanceToPOCA(), 5, 0.0, 50.0, false, true);
+ eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackSmallestDistanceToPOCA(), 25, 0.0, 250.0, false, true);
//eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackIntermediateHitsCount(), 10, -0.5, 9.5, false, true);
//eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackIntermediateHitsFraction(), 11, -0.05, 1.05, false, false);
- eval.addLikelihoodQuantityTrackToClump(new TrackToClumpDOCA(), 5, 0.0, 250.0, false, true);
- eval.addLikelihoodQuantityTrackToClump(new ClusterToClusterMinDistance(), 5, 0.0, 250.0, false, true);
- eval.addLikelihoodQuantityClumpToClump(new ClumpToClumpDOCA(), 5, 0.0, 500.0, false, true);
- eval.addLikelihoodQuantityClumpToClump(new ClusterToClusterMinDistance(), 5, 0.0, 500.0, false, true);
+ eval.addLikelihoodQuantityTrackToClump(new TrackToClumpDOCA(), 50, 0.0, 300.0, false, true);
+ eval.addLikelihoodQuantityTrackToClump(new ClusterToClusterMinDistance(), 25, 0.0, 250.0, false, true);
+ eval.addLikelihoodQuantityClumpToClump(new ClumpToClumpDOCA(), 20, 0.0, 200.0, false, true);
+ eval.addLikelihoodQuantityClumpToClump(new ClusterToClusterMinDistance(), 20, 0.0, 200.0, false, true);
// Handle things that have per-event info:
makeEventInfoList(eval);
@@ -93,26 +131,101 @@
Driver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
add(checkpoint);
} else {
- // Use pre-existing likelihood histograms to check clusters
+ // Use pre-existing likelihood histograms to check clusters.
+ // Output is, for each large cluster, a list of skeleton clusters inside it.
System.out.println("ExamplePFA: I will read in likelihood histograms and use them to make clusters.");
LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
- LikelihoodLinkPlotterDriver likelihoodPlotter = new LikelihoodLinkPlotterDriver(eval, 0.5, 0.5, 0.5, assoc, "MSTCluster linked", "Track segments linked", "Clumps", "MapClustersToSkeletons");
+ LikelihoodLinkPlotterDriver likelihoodPlotter = new LikelihoodLinkPlotterDriver(eval, 0.5, 0.6, 0.8, assoc, "MSTCluster linked", "Track segments linked", "Clumps", "MapClustersToSkeletons");
likelihoodPlotter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
likelihoodPlotter.initPlots("likelihoodPerformance.aida");
add(likelihoodPlotter);
-
- // Handle things that have per-event info:
+ // Some likelihood quantities need per-event info:
makeEventInfoList(eval);
+ // Assign hits that are within the local cluster but not part of a clump/MIP.
+ add (new HaloAssigner("MapClustersToSkeletons"));
+ // Output is a list of clusters where:
+ // A large cluster with multiple skeletons has a separate entry for each skeleton (+ associated halo)
+ // A large cluster with one skeleton will have one entry
+ // A cluster with no skeletons will have one entry
+ // The total number of hits is the same as the total number of hits in "MSTCluster linked"
+ add (new MakeSeparatedClusters("MSTCluster linked", "MapClustersToSkeletons", "MSTCluster separated"));
+ // Handle fragments:
+ add (new FragmentMerger("MSTCluster separated", "MSTCluster fragments merged", new SimpleFragmentIdentifier(nameOfHelixToClusterMap)));
+ //add (new FragmentMerger("MSTCluster separated", "MSTCluster fragments merged", new CheatFragmentIdentifier("MSTCluster separated")));
+ //add (new FragmentRemover("MSTCluster separated", "MSTCluster fragments merged", new SimpleFragmentIdentifier(nameOfHelixToClusterMap)));
+ //add (new CheatFragmentMerger("MSTCluster separated", "MSTCluster fragments merged", new CheatFragmentIdentifier("MSTCluster separated")));
+ // When done, check the total energy in the event
+ add (new EventEnergySum("MSTCluster fragments merged", nameOfClusterToHelixMapLinked, nameOfHelixToClusterMap));
+
+ List<String> knownClusterLists = new Vector<String>();
+ knownClusterLists.add("MSTCluster EMCal");
+ knownClusterLists.add("MSTCluster HCal");
+ knownClusterLists.add("MSTCluster linked");
+ knownClusterLists.add("MSTCluster separated");
+
+// add (new CheckStatusOfHitList("EcalBarrHits"));
+// add (new CheckStatusOfHitList("EcalEndcapHits"));
+// add (new CheckStatusOfHitList("HcalBarrHits"));
+// add (new CheckStatusOfHitList("HcalEndcapHits"));
+// add (new CheckStatusOfClusterList("MSTCluster EMCal", knownClusterLists));
+// add (new CheckStatusOfClusterList("MSTCluster HCal", knownClusterLists));
+ add (new CheckStatusOfClusterList("MSTCluster linked", knownClusterLists));
+ add (new CheckStatusOfClusterList("MSTCluster separated", knownClusterLists));
}
}
}
+ int m_count = 0;
public void process(EventHeader event) {
+ System.out.println("DEBUG: "+this.getClass().getName()+": Event "+m_count+":"); m_count++;
// Special handling of things that need per-event info:
- for (StructuralLikelihoodQuantityWithEventInfo quant : m_perEventQuantities) {
- quant.setEventInfo(event);
+ if (m_perEventQuantities != null) {
+ for (StructuralLikelihoodQuantityWithEventInfo quant : m_perEventQuantities) {
+ quant.setEventInfo(event);
+ }
+ }
+ // Here we can do a veto
+ if (checkEnergyBarrel(event, 0.9)) {
+ if (checkEnergyNeutrinos(event, 100.0)) {
+ super.process(event);
+ }
+ }
+ }
+
+ protected boolean checkEnergyNeutrinos(EventHeader event, double maxNeutrinoEnergy)
+ {
+ double truthNeutrinoEnergySum = 0.0;
+ List<MCParticle> eventMCParticles = event.getMCParticles();
+ for (MCParticle p : eventMCParticles) {
+ int pdg = p.getPDGID();
+ if (pdg==12 || pdg==14 || pdg==16 || pdg==18 || pdg==-12 || pdg==-14 || pdg==-16 || pdg==-18) {
+ truthNeutrinoEnergySum += p.getEnergy();
+ }
+ }
+ return (truthNeutrinoEnergySum <= maxNeutrinoEnergy); // <= in case they put 0 for the maximum
+ }
+
+ protected boolean checkEnergyBarrel(EventHeader event, double threshold)
+ {
+ // Require that a threshold fraction (e.g. 90%) of the final-state particle energy is within the barrel ( cos(theta) < 0.8 )
+ double energySumBarrel = 0.0;
+ double energySumNonBarrel = 0.0;
+ List<MCParticle> mcps = event.getMCParticles();
+ for (MCParticle mcp : mcps) {
+ if (mcp.getGeneratorStatus() == mcp.FINAL_STATE) {
+ Hep3Vector momentum = mcp.getMomentum();
+ double cosTheta = momentum.z() / momentum.magnitude();
+ if (Math.abs(cosTheta) < 0.8) {
+ // barrel
+ energySumBarrel += mcp.getEnergy();
+ } else {
+ // non-barrel
+ energySumNonBarrel += mcp.getEnergy();
+ }
+ }
}
- super.process(event);
+ double energySumTotal = energySumBarrel + energySumNonBarrel;
+ return (energySumBarrel / energySumTotal > threshold);
}
protected void makeEventInfoList(LikelihoodEvaluator eval)
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.3 -r1.4
--- GenericStructuralDriver.java 14 Oct 2005 17:53:10 -0000 1.3
+++ GenericStructuralDriver.java 16 Dec 2005 21:12:13 -0000 1.4
@@ -49,10 +49,10 @@
// once and once only:
if (vMIPs != null) {
for (int iMIP=0; iMIP<vMIPs.size(); iMIP++) {
- Cluster track1 = (Cluster) (vMIPs.get(iMIP));
+ Cluster track1 = vMIPs.get(iMIP);
// Compare to other MIPs:
for (int jMIP=iMIP+1; jMIP<vMIPs.size(); jMIP++) {
- Cluster track2 = (Cluster) (vMIPs.get(jMIP));
+ Cluster track2 = vMIPs.get(jMIP);
compareTrackSegmentToTrackSegment(track1, track2);
}
}
@@ -67,8 +67,10 @@
}
// Now consider Clump-Clump links:
if (vClumps != null) {
- for (Cluster clump1 : vClumps) {
- for (Cluster clump2 : vClumps) {
+ for (int iClump=0; iClump<vClumps.size(); iClump++) {
+ Cluster clump1 = vClumps.get(iClump);
+ for (int jClump=iClump+1; jClump<vClumps.size(); jClump++) {
+ Cluster clump2 = vClumps.get(jClump);
compareClumpToClump(clump1, clump2);
}
}
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.3 -r1.4
--- LikelihoodLinkDriver.java 14 Oct 2005 17:51:26 -0000 1.3
+++ LikelihoodLinkDriver.java 16 Dec 2005 21:12:13 -0000 1.4
@@ -69,18 +69,21 @@
if (likelihood > m_cutTrackToTrack) {
m_vlinksTrackToTrack.add(new Link(clus1, clus2));
}
+ //System.out.println("DEBUG: "+this.getClass().getName()+": linking Track("+clus1.getCalorimeterHits().size()+") to Track("+clus2.getCalorimeterHits().size()+") likelihood="+likelihood+" (c.f. cut="+m_cutTrackToTrack+")");
}
public void compareTrackSegmentToClump(Cluster clus1, Cluster clus2) {
double likelihood = m_eval.getLinkLikelihoodTrackToClump(clus1, clus2);
if (likelihood > m_cutTrackToClump) {
m_vlinksTrackToClump.add(new Link(clus1, clus2));
}
+ //System.out.println("DEBUG: "+this.getClass().getName()+": linking Track("+clus1.getCalorimeterHits().size()+") to Clump("+clus2.getCalorimeterHits().size()+") likelihood="+likelihood+" (c.f. cut="+m_cutTrackToClump+")");
}
public void compareClumpToClump(Cluster clus1, Cluster clus2) {
double likelihood = m_eval.getLinkLikelihoodClumpToClump(clus1, clus2);
if (likelihood > m_cutClumpToClump) {
m_vlinksClumpToClump.add(new Link(clus1, clus2));
}
+ //System.out.println("DEBUG: "+this.getClass().getName()+": linking Clump("+clus1.getCalorimeterHits().size()+") to Clump("+clus2.getCalorimeterHits().size()+") likelihood="+likelihood+" (c.f. cut="+m_cutClumpToClump+")");
}
public void initializeEvent() {
@@ -127,6 +130,10 @@
List<MapClusterToListOfClusters> tmpMapList = m_event.get(MapClusterToListOfClusters.class, m_outputMapName);
MapClusterToListOfClusters tmpMap = tmpMapList.iterator().next();
tmpMap.put(bigClus, vLinkedClusters);
+ // Debug printout:
+ //System.out.println("DEBUG: "+this.getClass().getName()+": Studied a cluster with "+bigClus.getCalorimeterHits().size()+" hits. Found "+vClumps.size()+" clumps and "+vMIPs.size()+" MIPs. Made "+vLinkedClusters.size()+" linked clusters. I had "+m_vlinksTrackToTrack.size()+" MIP-MIP links, "+m_vlinksTrackToClump.size()+" MIP-Clump links and "+m_vlinksClumpToClump.size()+" Clump-Clump links.");
+
+
}
void recursivelyAddTrack(Cluster currentMIP, Set<Cluster> linkedClusters, Set<Cluster> unassignedMIPs, Set<Cluster> unassignedClumps) {
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.2 -r1.3
--- LikelihoodLinkPlotterDriver.java 14 Oct 2005 17:50:34 -0000 1.2
+++ LikelihoodLinkPlotterDriver.java 16 Dec 2005 21:12:13 -0000 1.3
@@ -81,9 +81,10 @@
} else {
m_hTrackClumpBckgnd.fill(likelihood);
}
- if (likelihood > m_cutTrackToClump) {
- m_vlinksTrackToClump.add(new Link(clus1, clus2));
- }
+ super.compareTrackSegmentToClump(clus1, clus2);
+ //if (likelihood > m_cutTrackToClump) {
+ //m_vlinksTrackToClump.add(new Link(clus1, clus2));
+ //}
}
public void compareClumpToClump(Cluster clus1, Cluster clus2)
{
@@ -93,9 +94,10 @@
} else {
m_hClumpClumpBckgnd.fill(likelihood);
}
- if (likelihood > m_cutClumpToClump) {
- m_vlinksClumpToClump.add(new Link(clus1, clus2));
- }
+ super.compareClumpToClump(clus1, clus2);
+ //if (likelihood > m_cutClumpToClump) {
+ //m_vlinksClumpToClump.add(new Link(clus1, clus2));
+ //}
}
protected boolean determineIfLinkIsCorrect(Cluster clus1, Cluster clus2)
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.1 -r1.2
--- Link.java 29 Sep 2005 21:04:48 -0000 1.1
+++ Link.java 16 Dec 2005 21:12:13 -0000 1.2
@@ -18,7 +18,9 @@
} else {
return null;
}
- }
+ }
+ public Cluster getFirst() { return c1; }
+ public Cluster getSecond() { return c2; }
protected Cluster c1 = null;
protected Cluster c2 = null;
}
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.1 -r1.2
--- MapClusterToListOfClusters.java 1 Oct 2005 01:36:22 -0000 1.1
+++ MapClusterToListOfClusters.java 16 Dec 2005 21:12:13 -0000 1.2
@@ -6,7 +6,7 @@
import java.util.List;
import org.lcsim.event.Cluster;
-class MapClusterToListOfClusters extends HashMap<Cluster, List<Cluster>>
+public class MapClusterToListOfClusters extends HashMap<Cluster, List<Cluster>>
{
}
lcsim/src/org/lcsim/contrib/uiowa/structural
diff -u -r1.4 -r1.5
--- Remapper.java 19 Oct 2005 18:55:16 -0000 1.4
+++ Remapper.java 16 Dec 2005 21:12:13 -0000 1.5
@@ -16,10 +16,10 @@
* merging Clusters. This is needed when merging ECAL
* and HCAL clusters, for example.
*
- * @version $Id: Remapper.java,v 1.4 2005/10/19 18:55:16 mcharles Exp $
+ * @version $Id: Remapper.java,v 1.5 2005/12/16 21:12:13 mcharles Exp $
*/
-class Remapper extends Driver
+class Remapper<T> extends Driver
{
protected String m_clusterListName;
protected String m_mapName;
@@ -56,18 +56,21 @@
*/
public void process(EventHeader event) {
if (m_clusterSubListNames.size() != m_subMapNames.size()) { throw new AssertionError("Validity check failed"); }
- MapClusterToListOfClusters mapClustersToTracks = new MapClusterToListOfClusters();
+ Map<Cluster, List<T>> outputMap = new HashMap<Cluster, List<T>>();
List<Cluster> bigClusters = event.get(Cluster.class, m_clusterListName);
+ // Loop over the lists of sub-clusters:
for (int i=0; i<m_clusterSubListNames.size(); i++) {
String clusterSubListName = m_clusterSubListNames.get(i);
String subMapName = m_subMapNames.get(i);
List<Cluster> subClusters = event.get(Cluster.class, clusterSubListName);
- List<MapClusterToListOfClusters> dummyList = event.get(MapClusterToListOfClusters.class, subMapName);
- MapClusterToListOfClusters map = dummyList.iterator().next();
+ // This next bit is ugly, due to a Java/org.lcsim language barrier:
+ List<Object> dummyListObj = event.get(Object.class, subMapName);
+ Object dummyObj = dummyListObj.iterator().next();
+ Map<Cluster, List<T>> map = (Map<Cluster, List<T>>) (dummyObj);
// OK. We have a list of clusters, which feed into the merged clusters.
// For each sub-cluster with associated MIPs, find the big cluster it went into
for (Cluster subCluster : subClusters) {
- List<Cluster> tracksMatchedToThisSubCluster = map.get(subCluster);
+ List<T> tracksMatchedToThisSubCluster = map.get(subCluster);
if (tracksMatchedToThisSubCluster != null && tracksMatchedToThisSubCluster.size()>0) {
Cluster bigClusterItWentInto = null;
for (Cluster bigCluster : bigClusters) {
@@ -84,10 +87,10 @@
}
if (bigClusterItWentInto != null) {
// Mapped OK
- List<Cluster> tracksMappedToThisBigCluster = mapClustersToTracks.get(bigClusterItWentInto);
+ List<T> tracksMappedToThisBigCluster = outputMap.get(bigClusterItWentInto);
if (tracksMappedToThisBigCluster == null) {
- tracksMappedToThisBigCluster = new Vector<Cluster>();
- mapClustersToTracks.put(bigClusterItWentInto, tracksMappedToThisBigCluster);
+ tracksMappedToThisBigCluster = new Vector<T>();
+ outputMap.put(bigClusterItWentInto, tracksMappedToThisBigCluster);
}
tracksMappedToThisBigCluster.addAll(tracksMatchedToThisSubCluster);
} else {
@@ -100,8 +103,8 @@
}
}
// Dummy for writeout
- List<MapClusterToListOfClusters> dummyList = new Vector<MapClusterToListOfClusters> ();
- dummyList.add(mapClustersToTracks);
+ List<Map<Cluster,List<T>>> dummyList = new Vector<Map<Cluster,List<T>>>();
+ dummyList.add(outputMap);
event.put(m_mapName, dummyList);
if (m_debug) {
@@ -110,10 +113,10 @@
int debugNumSmallClusters = 0;
int debugNumMIPs = 0;
int debugNumClumps = 0;
- Set<Cluster> debugBigClusters = mapClustersToTracks.keySet();
+ Set<Cluster> debugBigClusters = outputMap.keySet();
for (Cluster currentBigCluster : debugBigClusters) {
debugNumBigClusters++;
- List<Cluster> mappedTracks = mapClustersToTracks.get(currentBigCluster);
+ List<T> mappedTracks = outputMap.get(currentBigCluster);
debugNumMIPs += mappedTracks.size();
debugNumSmallClusters += currentBigCluster.getClusters().size();
}
CVSspam 0.2.8