Commit in lcsim/src/org/lcsim/contrib/uiowa/devel on MAIN
PFA.java  (+265 lines)  added  1.1
Development version of PFA

lcsim/src/org/lcsim/contrib/uiowa/devel
PFA.java added at 1.1
diff -N PFA.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ PFA.java	6 Jan 2006 23:02:47 -0000	1.1
@@ -0,0 +1,265 @@
+package devel;
+
+import java.util.List;
+import java.util.Vector;
+
+import hep.physics.vec.*;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.MCParticle;
+import org.lcsim.recon.cluster.mst.*;
+
+import structural.*;
+import structural.likelihood.*;
+
+/**
+ * An example PFA (particle-flow algorithm) using the structural algorithm.
+ *
+ * @version $Id: PFA.java,v 1.1 2006/01/06 23:02:47 mcharles Exp $
+ */
+
+public class PFA extends Driver
+{
+    // Special treatment for likelihood quantities that need per-event info
+    // (e.g. geometry)
+    List<StructuralLikelihoodQuantityWithEventInfo> m_perEventQuantities = null;
+
+    TestFragmentIdentifier  m_wrappedID = null;
+
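+    // Flushing the wrapped fragment identifier on suspend() should commit its
+    // accumulated cheat-vs-non-cheat comparison before the run ends. (That
+    // commit() records end-of-run statistics is an assumption based on how
+    // m_wrappedID is used below.)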
+    public void suspend()
+    {
+	if (m_wrappedID != null) {
+	    m_wrappedID.commit();
+	}
+	super.suspend();
+    }
+
+    public PFA()
+    {
+	boolean writeLikelihood = false;
+
+	// Step 1 (here):
+	//   For each MC particle, make a helix swimmer.
+	//   Requires B field.
+	//   Output: Helix swimmer and MC particle (linked)
+	// Step 2:
+	//   For each helix, extrapolate it to the calorimeter
+	//   surfaces (barrel and endcap).
+	//   Requires geometry information.
+	//   Output: Position and momentum for each particle/swimmer
+	// Step 3:
+	//   For each helix that reaches an ECAL, look for
+	//   a MIP stub or cluster nearby.
+	//   Output: Bi-directional maps from MCParticle/swimmer to cluster
+	// Step 4:
+	//   (a) Handle MCParticles which weren't mapped properly
+	//   (b) Look at E/P for MCParticles which were mapped.
+	//
+	String nameOfHelixList = "HelixList";
+	String nameOfExtrapolationInfoList = "HelixExtrapolatedToECALList";
+	String nameOfHelixToClusterMap = "HelixToClusterMap";
+	String nameOfClusterToHelixMap = "ClusterToHelixMap";
+	add(new MakeHelixSwimmersFromTruth(nameOfHelixList)); // steps 1 and 2
+	add(new SwimToECAL(nameOfHelixList, nameOfExtrapolationInfoList)); // step 2
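+	// (For step 1, the swimmer needs the truth charge and momentum plus the
+	// solenoid field B_z; the transverse radius of the resulting helix is
+	// r[m] = pT[GeV] / (0.3 * |q| * B_z[T]). Formula given for reference only.)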
+
+	// Begin with a big-scale cluster set, made with the MST:
+	Metrics geomDist = new GeometricalDistance();
+	Metrics hitHitDist = new MinimumHitToHitDistance();
+
+	MSTClusterDriver mstDriverEcal = new MSTClusterDriver("EMCal");
+	MSTClusterDriver mstDriverHcal = new MSTClusterDriver("HCal");
+	mstDriverEcal.registerMetrics(geomDist);
+	mstDriverHcal.registerMetrics(geomDist);
+	mstDriverEcal.setThreshold(30.0); // 3cm
+	mstDriverHcal.setThreshold(100.0); // 10cm
+	mstDriverEcal.setClusterName("MSTCluster EMCal");
+	mstDriverHcal.setClusterName("MSTCluster HCal");
+	add(mstDriverEcal);
+	add(mstDriverHcal);
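+	// For reference: MST clustering with a fixed edge threshold gives the
+	// same partition as single-linkage agglomeration -- any two hits whose
+	// metric distance is below the threshold end up in the same cluster.
+	// A minimal union-find sketch of the idea (illustrative names only,
+	// not the MSTClusterDriver internals):
+	//
+	//   int[] parent = new int[hits.size()];
+	//   for (int i = 0; i < parent.length; i++) parent[i] = i;
+	//   for (int i = 0; i < hits.size(); i++)
+	//       for (int j = i + 1; j < hits.size(); j++)
+	//           if (metric.distance(hits.get(i), hits.get(j)) < threshold)
+	//               union(parent, i, j); // merge the two components
+	//   // each remaining connected component is one cluster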
+
+	// Find MIPs within clusters. Have to do this before merging ECAL and HCAL
+	// because the MIP-finder is layer-based -- we don't want to confuse layer n
+	// of the ECAL with layer n of the HCAL.
+	TrackSegmentFinder findTracksEcal = new TrackSegmentFinder("MSTCluster EMCal", "Track segments EMCal");
+	TrackSegmentFinder findTracksHcal = new TrackSegmentFinder("MSTCluster HCal", "Track segments HCal");
+	add(findTracksEcal);
+	add(findTracksHcal);
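+	// (A minimum-ionizing particle typically leaves one isolated hit per
+	// layer, which is why the segment finder works layer-by-layer.)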
+
+	// Try to link the tracks from the tracking system to MIP segments and/or clusters:
+        add(new MatchHelixToCluster(nameOfExtrapolationInfoList, "Track segments EMCal", "MSTCluster EMCal", nameOfHelixToClusterMap, nameOfClusterToHelixMap));
+
+	// Now, link them together across the ECAL-HCAL boundary:
+	MSTClusterDriver mstDriverLink = new MSTClusterDriver("User");
+	mstDriverLink.registerMetrics(hitHitDist);
+	mstDriverLink.setThreshold(50.0); // 5cm
+	mstDriverLink.addUserInputList("MSTCluster EMCal");
+	mstDriverLink.addUserInputList("MSTCluster HCal");
+	mstDriverLink.setClusterName("MSTCluster linked");
+	add(mstDriverLink);
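+	// The hit-to-hit metric here is presumably the minimum over all hit
+	// pairs of the two clusters, i.e. something like:
+	//
+	//   double min = Double.MAX_VALUE;
+	//   for (CalorimeterHit a : clusterA.getCalorimeterHits())
+	//       for (CalorimeterHit b : clusterB.getCalorimeterHits())
+	//           min = Math.min(min, dist(a, b)); // Euclidean distance between hit positions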
+
+	// Special step: after this linking, re-map the MIP segments and the track-cluster maps onto the merged clusters.
+	Remapper<Cluster> remapMIPs = new Remapper<Cluster>("MSTCluster linked", "Track segments linked");
+	remapMIPs.addInputClusters("MSTCluster EMCal", "Track segments EMCal");
+	remapMIPs.addInputClusters("MSTCluster HCal", "Track segments HCal");
+	remapMIPs.setDebug(true);
+	add(remapMIPs);
+	String nameOfClusterToHelixMapLinked = "ClusterToHelixMapLinked";
+	Remapper<TrackExtrapolationInfo> remapTracks = new Remapper<TrackExtrapolationInfo>("MSTCluster linked", nameOfClusterToHelixMapLinked);
+	remapTracks.addInputClusters("MSTCluster EMCal", nameOfClusterToHelixMap);
+	remapTracks.addInputClusters("MSTCluster HCal", nameOfClusterToHelixMap);
+	remapTracks.setDebug(true);
+	add(remapTracks);
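+	// The Remapper pattern: after clusters are merged, anything keyed by
+	// the old clusters (track segments, helix associations) must be
+	// re-keyed to the merged clusters. Conceptually (not the Remapper API):
+	//
+	//   for (Map.Entry<Cluster, T> e : oldMap.entrySet())
+	//       newMap.put(mergedClusterContaining(e.getKey()), e.getValue());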
+
+	// Find clumps within clusters
+	ClumpFinder findClumps = new ClumpFinder("MSTCluster linked", "Track segments linked", "Clumps");
+	add(findClumps);
+
+	// Run likelihood structural analysis
+	if (true) { // development toggle: set to false to skip the structural-likelihood stage
+	    ClusterAssociator assoc = new ClusterEnergyAssociator();
+	    if (writeLikelihood) {
+		// Obtain and write likelihood histograms
+		System.out.println("PFA: I will obtain and write out likelihood histograms.");
+		LikelihoodEvaluator eval = new LikelihoodEvaluator();
+		eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackDOCA(), 50, 0.0, 100.0, false, true);
+		eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackPOCAInCalorimeter(), 2, -0.5, 1.5, false, false);
+		eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackSmallestDistanceToPOCA(), 25, 0.0, 250.0, false, true);
+		//eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackIntermediateHitsCount(), 10, -0.5, 9.5, false, true);
+		//eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackIntermediateHitsFraction(), 11, -0.05, 1.05, false, false);
+		eval.addLikelihoodQuantityTrackToClump(new TrackToClumpDOCA(), 50, 0.0, 300.0, false, true);
+		eval.addLikelihoodQuantityTrackToClump(new ClusterToClusterMinDistance(), 25, 0.0, 250.0, false, true);
+		eval.addLikelihoodQuantityClumpToClump(new ClumpToClumpDOCA(), 20, 0.0, 200.0, false, true);
+		eval.addLikelihoodQuantityClumpToClump(new ClusterToClusterMinDistance(), 20, 0.0, 200.0, false, true);
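+		// Each quantity above is histogrammed with the given binning
+		// (nBins, min, max, plus two flags defined by LikelihoodEvaluator).
+		// A common way to combine such histograms into a link likelihood
+		// -- an assumption about this code, not read from it -- is a
+		// naive-Bayes ratio over the measured quantities x_i:
+		//
+		//   prodS = product_i P(x_i | same particle)
+		//   prodB = product_i P(x_i | different particles)
+		//   L     = prodS / (prodS + prodB)  // in [0,1]; cut at e.g. 0.5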
+		// Handle things that have per-event info:
+		makeEventInfoList(eval);
+
+		LikelihoodFindingStructuralDriver likelihoodWriter = new LikelihoodFindingStructuralDriver(eval, assoc, "MSTCluster linked", "Track segments linked", "Clumps");
+		likelihoodWriter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
+		add(likelihoodWriter);
+		Driver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
+		add(checkpoint);
+	    } else {
+		// Use pre-existing likelihood histograms to check clusters.
+		// Output is, for each large cluster, a list of skeleton clusters inside it.
+		System.out.println("PFA: I will read in likelihood histograms and use them to make clusters.");
+		LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
+		LikelihoodLinkPlotterDriver likelihoodPlotter = new LikelihoodLinkPlotterDriver(eval, 0.5, 0.6, 0.8, assoc, "MSTCluster linked", "Track segments linked", "Clumps", "MapClustersToSkeletons");
+		likelihoodPlotter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
+		likelihoodPlotter.initPlots("likelihoodPerformance.aida");
+		add(likelihoodPlotter);
+		// Some likelihood quantities need per-event info:
+		makeEventInfoList(eval);
+		// Assign hits that are within the local cluster but not part of a clump/MIP.
+		add (new HaloAssigner("MapClustersToSkeletons"));
+		// Output is a list of clusters where:
+		//    A large cluster with multiple skeletons has a separate entry for each skeleton (+ associated halo)
+		//    A large cluster with one skeleton will have one entry
+		//    A cluster with no skeletons will have one entry
+		//    The total number of hits is the same as the total number of hits in "MSTCluster linked"
+		add (new MakeSeparatedClusters("MSTCluster linked", "MapClustersToSkeletons", "MSTCluster separated"));
+		// Handle fragments:
+		FragmentIdentifier nonCheatID = new SimpleFragmentIdentifier(nameOfHelixToClusterMap);
+		FragmentIdentifier    cheatID = new CheatFragmentIdentifier("MSTCluster separated");
+		m_wrappedID = new TestFragmentIdentifier(nonCheatID, cheatID);
+		add (new FragmentMerger("MSTCluster separated", "MSTCluster fragments merged", m_wrappedID)); // or nonCheatID
+		add (new FragmentMerger("MSTCluster separated", "MSTCluster fragments merged-cheat", cheatID));
+		//add (new TestFragmentMerger("MSTCluster separated", "MSTCluster fragments merged-cheat", cheatID, new CheatFragmentMerger("MSTCluster separated", "MSTCluster fragments cheated xxx", cheatID))); // test!
+		add (new FragmentRemover("MSTCluster separated", "MSTCluster fragments removed", nonCheatID));
+		add (new FragmentRemover("MSTCluster separated", "MSTCluster fragments removed-cheat", cheatID));
+		add (new CheatFragmentMerger("MSTCluster separated", "MSTCluster fragments cheated", cheatID));
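+		// The five output lists above allow a side-by-side comparison of
+		// fragment-handling strategies: merging vs. removing fragments,
+		// each driven by either the non-cheating identifier or the
+		// truth-based one, plus a pure cheating merger as reference.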
+		// When done, check the total energy in the event
+		add (new EventEnergySum("MSTCluster fragments merged", nameOfClusterToHelixMapLinked, nameOfHelixToClusterMap, "EnergySumHistos-merged.aida"));
+		add (new EventEnergySum("MSTCluster fragments merged-cheat", nameOfClusterToHelixMapLinked, nameOfHelixToClusterMap, "EnergySumHistos-merged-cheat.aida"));
+		add (new EventEnergySum("MSTCluster fragments removed", nameOfClusterToHelixMapLinked, nameOfHelixToClusterMap, "EnergySumHistos-removed.aida"));
+		add (new EventEnergySum("MSTCluster fragments removed-cheat", nameOfClusterToHelixMapLinked, nameOfHelixToClusterMap, "EnergySumHistos-removed-cheat.aida"));
+		add (new EventEnergySum("MSTCluster fragments cheated", nameOfClusterToHelixMapLinked, nameOfHelixToClusterMap, "EnergySumHistos-cheated.aida"));
+		add (new EventEnergySum("MSTCluster linked", nameOfClusterToHelixMapLinked, nameOfHelixToClusterMap, "EnergySumHistos-all.aida"));
+		//add (new test.Test("MSTCluster linked")); // TEST!
+
+		List<String> knownClusterLists = new Vector<String>();
+		knownClusterLists.add("MSTCluster EMCal");
+		knownClusterLists.add("MSTCluster HCal");
+		knownClusterLists.add("MSTCluster linked");
+		knownClusterLists.add("MSTCluster separated");
+
+// 		add (new CheckStatusOfHitList("EcalBarrHits"));
+// 		add (new CheckStatusOfHitList("EcalEndcapHits"));
+// 		add (new CheckStatusOfHitList("HcalBarrHits"));
+// 		add (new CheckStatusOfHitList("HcalEndcapHits"));
+// 		add (new CheckStatusOfClusterList("MSTCluster EMCal", knownClusterLists));
+// 		add (new CheckStatusOfClusterList("MSTCluster HCal", knownClusterLists));
+// 		add (new CheckStatusOfClusterList("MSTCluster linked", knownClusterLists));
+// 		add (new CheckStatusOfClusterList("MSTCluster separated", knownClusterLists));
+	    }
+	}
+    }
+
+    int m_count = 0;
+    public void process(EventHeader event) {
+	System.out.println("DEBUG: " + this.getClass().getName() + ": Event " + m_count + ":");
+	m_count++;
+	// Special handling of things that need per-event info:
+	if (m_perEventQuantities != null) {
+	    for (StructuralLikelihoodQuantityWithEventInfo quant : m_perEventQuantities) {
+		quant.setEventInfo(event);
+	    }
+	}
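+	// Note: super.process(event) dispatches to all the child drivers added
+	// in the constructor, so vetoed events skip the entire chain.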
+	// Event veto: require most of the energy in the barrel and little true neutrino energy.
+	if (checkEnergyBarrel(event, 0.9)) {
+	    if (checkEnergyNeutrinos(event, 100.0)) {
+		super.process(event);
+	    }
+	}
+    }
+
+    protected boolean checkEnergyNeutrinos(EventHeader event, double maxNeutrinoEnergy)
+    {
+        double truthNeutrinoEnergySum = 0.0;
+	List<MCParticle> eventMCParticles = event.getMCParticles();
+	for (MCParticle p : eventMCParticles) {
+            int pdg = Math.abs(p.getPDGID());
+            // 12, 14, 16 = nu_e, nu_mu, nu_tau; 18 = the fourth-generation
+            // neutrino slot in the PDG numbering scheme.
+            if (pdg == 12 || pdg == 14 || pdg == 16 || pdg == 18) {
+                truthNeutrinoEnergySum += p.getEnergy();
+            }
+	}
+	return (truthNeutrinoEnergySum <= maxNeutrinoEnergy); // "<=" so a maximum of 0.0 still accepts events with no neutrinos
+    }
+
+    protected boolean checkEnergyBarrel(EventHeader event, double threshold) 
+    {
+	// Require that a fraction "threshold" (e.g. 0.9) of the final-state energy lies in the barrel, |cos(theta)| < 0.8 (i.e. more than ~37 degrees from the beam axis)
+	double energySumBarrel = 0.0;
+	double energySumNonBarrel = 0.0;
+	List<MCParticle> mcps = event.getMCParticles();
+	for (MCParticle mcp : mcps) {
+	    if (mcp.getGeneratorStatus() == MCParticle.FINAL_STATE) {
+		Hep3Vector momentum = mcp.getMomentum();
+		double cosTheta = momentum.z() / momentum.magnitude();
+		if (Math.abs(cosTheta) < 0.8) {
+		    // barrel
+		    energySumBarrel += mcp.getEnergy();
+		} else {
+		    // non-barrel
+		    energySumNonBarrel += mcp.getEnergy();
+		}
+	    }
+	}
+	double energySumTotal = energySumBarrel + energySumNonBarrel;
+	if (energySumTotal <= 0.0) return false; // no final-state energy recorded: veto
+	return (energySumBarrel / energySumTotal > threshold);
+    }
+    
+    protected void makeEventInfoList(LikelihoodEvaluator eval) 
+    {
+	// Handle things that have per-event info:
+	m_perEventQuantities = new Vector<StructuralLikelihoodQuantityWithEventInfo>();
+	List<StructuralLikelihoodQuantity> quantities = eval.getLikelihoodQuantities();
+	for (StructuralLikelihoodQuantity quant : quantities) {
+	    if (quant instanceof StructuralLikelihoodQuantityWithEventInfo) {
+		m_perEventQuantities.add((StructuralLikelihoodQuantityWithEventInfo) quant);
+	    }
+	}
+    }
+
+}