Print

Print


Commit in lcsim/src/org/lcsim/contrib/uiowa/template on MAIN
NonTrivialPFA.java  (+374 -186 lines)  revision 1.5 -> 1.6
Latest unstable PFA

lcsim/src/org/lcsim/contrib/uiowa/template
NonTrivialPFA.java 1.5 -> 1.6
diff -u -r1.5 -r1.6
--- NonTrivialPFA.java	8 Feb 2006 19:06:25 -0000	1.5
+++ NonTrivialPFA.java	18 Jul 2006 00:45:05 -0000	1.6
@@ -1,47 +1,88 @@
-package template;
+package org.lcsim.recon.pfa.structural;
 
 import java.util.*; 
+import hep.physics.vec.*;
 
 import org.lcsim.util.*;
+import org.lcsim.util.hitmap.*;
 import org.lcsim.event.*;
 import org.lcsim.event.util.*;
 import org.lcsim.recon.cluster.mst.*;
 import org.lcsim.recon.cluster.mipfinder.*;
 import org.lcsim.recon.cluster.clumpfinder.*;
+import org.lcsim.recon.cluster.structural.likelihood.*;
+import org.lcsim.digisim.DigiSimDriver;
+import org.lcsim.recon.cluster.util.CalHitMapDriver;
+import org.lcsim.digisim.SimCalorimeterHitsDriver;
 import org.lcsim.recon.cluster.util.ClusterSizeDecision;
+import org.lcsim.recon.cluster.analysis.*;
+import org.lcsim.event.util.CreateFinalStateMCParticleList;
+import org.lcsim.mc.fast.tracking.MCFastTracking;
+import org.lcsim.recon.pfa.identifier.SimpleNeutralParticleMaker;
+import org.lcsim.recon.pfa.identifier.SimpleChargedParticleMaker;
+//import org.lcsim.recon.pfa.identifier.DebugChargedParticleMaker;
+import org.lcsim.recon.cluster.util.BothCalorimetersDecision;
+import org.lcsim.recon.cluster.util.ClusterListFilterDriver;
+import org.lcsim.recon.cluster.util.VetoClustersFromParticles;
+import org.lcsim.recon.cluster.util.ParticleListToClusterListDriver;
+import org.lcsim.recon.pfa.output.EnergySumPlotter;
+import org.lcsim.recon.pfa.output.CorrectedEnergySumPlotter;
+import org.lcsim.recon.pfa.output.ConfusionPlotter;
+import org.lcsim.util.decision.ListFilterDriver;
+import org.lcsim.util.decision.ParticlePDGDecision;
+import org.lcsim.recon.cluster.cheat.PerfectClusterer;
+import org.lcsim.recon.cluster.structural.ChargedNeutralFragmentSeparator;
 
-import structural.likelihood.*;
+import org.lcsim.util.aida.AIDA;
 
 public class NonTrivialPFA extends Driver
 {
+    // I think this is needed for Ron's analysis:
+    private AIDA aida = AIDA.defaultInstance();
+
     public NonTrivialPFA()
     {
+	// Book-keeping
+	HitBookKeeper accountant = new HitBookKeeper();
+	accountant.setDebug(false);
+
+	// Set up the MC list
+	//
+	// Shift this bit into the process() routine:
+	CreateFinalStateMCParticleList mcListMakerGen = new CreateFinalStateMCParticleList("Gen");
+	CreateFinalStateMCParticleList mcListMakerSim = new CreateFinalStateMCParticleList("Sim");
+	add(mcListMakerGen);
+	add(mcListMakerSim);
+	String mcListNameGen = "GenFinalStateParticles";
+	String mcListNameSim = "SimFinalStateParticles";
+	String mcListName = mcListNameGen;
 
 	// Step 0: Run digisim
 
 	// CalHitMapDriver is needed by DigiSim
-	add(new org.lcsim.recon.cluster.util.CalHitMapDriver());
+	add(new CalHitMapDriver());
 	// DigiSim: SimCalHits -> RawCalHits
-	org.lcsim.digisim.DigiSimDriver digi = new org.lcsim.digisim.DigiSimDriver();
+	DigiSimDriver digi = new org.lcsim.digisim.DigiSimDriver();
 	add(digi);
 	// RawCalHits -> SimCalorimeterHits
-	add( new org.lcsim.digisim.SimCalorimeterHitsDriver() );
+	add( new SimCalorimeterHitsDriver() );
 
 	// Step 1: Set up input hit lists:
-	HitMapDriver hitmapEcal = new HitMapDriver();
+	HitListToHitMapDriver hitmapEcal = new HitListToHitMapDriver();
 	hitmapEcal.addInputList("EcalBarrDigiHits");
 	hitmapEcal.addInputList("EcalEndcapDigiHits");
 	hitmapEcal.setOutput("input hit map ecal");
-	HitMapDriver hitmapHcal = new HitMapDriver();
+	HitListToHitMapDriver hitmapHcal = new HitListToHitMapDriver();
 	hitmapHcal.addInputList("HcalBarrDigiHits");
 	hitmapHcal.addInputList("HcalEndcapDigiHits");
 	hitmapHcal.setOutput("input hit map hcal");
 	add(hitmapEcal);
 	add(hitmapHcal);
+	accountant.addListOfNamedLists( new String[] { "input hit map ecal", "input hit map hcal" } );
 
 	// Find tracks
 	// Output: List<Track> saved as EventHeader.TRACKS
-	add (new org.lcsim.mc.fast.tracking.MCFastTracking());
+	add (new MCFastTracking());
 
 	// Find track segments in ECAL and HCAL
 	TrackClusterDriver ecalMIP = new TrackClusterDriver("input hit map ecal", "mips ecal", "hit map ecal without mips");
@@ -49,43 +90,67 @@
 	add(ecalMIP);
 	add(hcalMIP);
         // Merge the two lists:
-        ListMerger<Cluster> mergeMIPs = new ListMerger<Cluster>();
+	ListAddDriver<Cluster> mergeMIPs = new ListAddDriver<Cluster>();
         mergeMIPs.addInputList("mips ecal");
         mergeMIPs.addInputList("mips hcal");
         mergeMIPs.setOutputList("mips");
 	add(mergeMIPs);
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips", "hit map hcal without mips", "mips ecal", "mips hcal" } );
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips", "hit map hcal without mips", "mips" } );
 
-	// Step 5b: Find clumps in ECAL and HCAL
+	// Find photons in the ECAL (cheat for now)
+	add(new ListFilterDriver(new ParticlePDGDecision(22), mcListName, "MCParticles photons only"));
+	PerfectClusterer myCheatPhotonFinder = new PerfectClusterer();
+	myCheatPhotonFinder.setInputHitMap("hit map ecal without mips");
+	myCheatPhotonFinder.setOutputHitMap("hit map ecal without mips or photons");
+	myCheatPhotonFinder.setOutputClusterList("photon clusters");
+	myCheatPhotonFinder.setMCParticleList("MCParticles photons only");
+	myCheatPhotonFinder.allowHitSharing(false);
+	add(myCheatPhotonFinder);
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips ecal", "mips hcal", "photon clusters" } );
+	// ID them
+	SimpleNeutralParticleMaker myPhotonIdentifier = new SimpleNeutralParticleMaker(22); // 22 = photon
+	myPhotonIdentifier.setInputClusterList("photon clusters");
+	myPhotonIdentifier.setOutputParticleList("photon particles");
+	add(myPhotonIdentifier);
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips ecal", "mips hcal", "photon particles" } );
+
+	// Find clumps in ECAL and HCAL
 	// Clumps are defined as contiguous sets of
 	// 6+ hits where the local hit density (in a 5x5x3 block) is 7/75
 	// or more for each hit. Track segment hits
 	// are NOT used.
-	ClumpFinder findClumpsECAL = new ClumpFinder("hit map ecal without mips", "clumps ecal", "hit map ecal without mips or clumps");
+	ClumpFinder findClumpsECAL = new ClumpFinder("hit map ecal without mips or photons", "clumps ecal", "hit map ecal without mips or photons or clumps");
 	ClumpFinder findClumpsHCAL = new ClumpFinder("hit map hcal without mips", "clumps hcal", "hit map hcal without mips or clumps");
         add(findClumpsECAL);
         add(findClumpsHCAL);
-        ListMerger<Cluster> mergeClumps = new ListMerger<Cluster>();
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "photon particles", "clumps ecal", "clumps hcal" } );
+	ListAddDriver<Cluster> mergeClumps = new ListAddDriver<Cluster>();
 	mergeClumps.addInputList("clumps ecal");
         mergeClumps.addInputList("clumps hcal");
         mergeClumps.setOutputList("clumps");
 	add(mergeClumps);
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "photon particles", "clumps" } );
 
-	// Step 3: Find large-scale clusters with the MST
+	// Step 3: Find large-scale hadronic clusters with the MST
 	// Output: List<Cluster>
-	NewMSTDriver mstEcal = new NewMSTDriver("ecal hit map after mst", "mst clusters ecal");
-	NewMSTDriver mstHcal = new NewMSTDriver("hcal hit map after mst", "mst clusters hcal");
-	mstEcal.addInputHitMap("hit map ecal without mips or clumps");
+	//NewMSTDriver mstEcal = new NewMSTDriver("ecal hit map after mst", "mst clusters ecal");
+	//NewMSTDriver mstHcal = new NewMSTDriver("hcal hit map after mst", "mst clusters hcal");
+	MSTClusterDriver mstEcal = new MSTClusterDriver("ecal hit map after mst", "mst clusters ecal");
+	MSTClusterDriver mstHcal = new MSTClusterDriver("hcal hit map after mst", "mst clusters hcal");
+	mstEcal.addInputHitMap("hit map ecal without mips or photons or clumps");
 	mstHcal.addInputHitMap("hit map hcal without mips or clumps");
-	mstEcal.addInputClusterList("mips ecal");
-	mstHcal.addInputClusterList("mips hcal");
-	mstEcal.addInputClusterList("clumps ecal");
-	mstHcal.addInputClusterList("clumps hcal");
+	mstEcal.addUserInputList("mips ecal");
+	mstHcal.addUserInputList("mips hcal");
+	mstEcal.addUserInputList("clumps ecal");
+	mstHcal.addUserInputList("clumps hcal");
 	mstEcal.setThreshold(30.0);
 	mstHcal.setThreshold(100.0);
         mstEcal.registerMetrics(new MinimumHitToHitDistance());
         mstHcal.registerMetrics(new MinimumHitToHitDistance());
 	add (mstEcal);
 	add (mstHcal);
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters ecal", "mst clusters hcal", "photon particles" } );
 
 	// Step 4: Link across the ECAL-HCAL boundary
 	// Input: List<Cluster> for ECAL
@@ -99,188 +164,271 @@
         mstDriverLink.setClusterName("mst clusters linked");
 	mstDriverLink.setPairDecision(new BothCalorimetersDecision());
         add(mstDriverLink);
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked", "photon particles" } );
 
 	// OK. Now we are going to weed out the small (<10 hit) clusters.
 	ClusterListFilterDriver sizeFilterDriver = new ClusterListFilterDriver();
 	sizeFilterDriver.setInputDecision(new ClusterSizeDecision(10));
-	sizeFilterDriver.setInputClusterList("mst clusters linked");
-	sizeFilterDriver.setOutputClusterListPass("mst clusters linked (>=10 hits)");
+	sizeFilterDriver.setInputList("mst clusters linked");
+	sizeFilterDriver.setOutputList("mst clusters linked (>=10 hits)");
 	sizeFilterDriver.setOutputClusterListFail("mst clusters linked (<10 hits)");
 	sizeFilterDriver.setOutputHitMapPass("hits from mst clusters linked (>=10 hits)");
 	sizeFilterDriver.setOutputHitMapFail("hits from mst clusters linked (<10 hits)");
 	add(sizeFilterDriver);
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "photon particles" } );
 
 	boolean writeLikelihood = false;
-	structural.ClusterAssociator assoc = new structural.ClusterEnergyAssociator();
 	if (writeLikelihood) {
-	  //   Step 6a: Make likelihood PDFs
-	  LikelihoodEvaluator eval = new LikelihoodEvaluator();
-	  eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackDOCA(), 50, 0.0, 100.0, false, true);
-	  eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackPOCAInCalorimeter(), 2, -0.5, 1.5, false, false);
-	  eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackSmallestDistanceToPOCA(), 25, 0.0, 250.0, false, true);
-	  eval.addLikelihoodQuantityTrackToClump(new TrackToClumpDOCA(), 50, 0.0, 300.0, false, true);
-	  eval.addLikelihoodQuantityTrackToClump(new ClusterToClusterMinDistance(), 25, 0.0, 250.0, false, true);
-	  eval.addLikelihoodQuantityClumpToClump(new ClumpToClumpDOCA(), 20, 0.0, 200.0, false, true);
-	  eval.addLikelihoodQuantityClumpToClump(new ClusterToClusterMinDistance(), 20, 0.0, 200.0, false, true);
-	  // Handle things that have per-event info:
-	  makeEventInfoList(eval); 
-	  structural.LikelihoodFindingStructuralDriver likelihoodWriter = new structural.LikelihoodFindingStructuralDriver(eval, assoc, "mst clusters linked (>=10 hits)", "mips", "clumps");
-	  add(likelihoodWriter);
-	  Driver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
-	  add(checkpoint);
+	    //   Step 6a: Make likelihood PDFs
+	    LikelihoodEvaluator eval = new LikelihoodEvaluator();
+	    eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackDOCA(), 50, 0.0, 100.0, false, true);
+	    eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackPOCAInCalorimeter(), 2, -0.5, 1.5, false, false);
+	    eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackSmallestDistanceToPOCA(), 25, 0.0, 250.0, false, true);
+	    eval.addLikelihoodQuantityTrackToClump(new TrackToClumpDOCA(), 50, 0.0, 300.0, false, true);
+	    eval.addLikelihoodQuantityTrackToClump(new ClusterToClusterMinDistance(), 25, 0.0, 250.0, false, true);
+	    eval.addLikelihoodQuantityClumpToClump(new ClumpToClumpDOCA(), 20, 0.0, 200.0, false, true);
+	    eval.addLikelihoodQuantityClumpToClump(new ClusterToClusterMinDistance(), 20, 0.0, 200.0, false, true);
+	    // Handle things that have per-event info:
+	    makeEventInfoList(eval); 
+	    org.lcsim.recon.cluster.structural.LikelihoodFindingStructuralDriver likelihoodWriter = new org.lcsim.recon.cluster.structural.LikelihoodFindingStructuralDriver(eval, "mst clusters linked (>=10 hits)", "mips", "clumps");
+	    String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
+	    String[] inputClusterListsForAssociator = {"mips", "clumps"};
+	    likelihoodWriter.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
+	    add(likelihoodWriter);
+	    Driver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
+	    add(checkpoint);
 	} else {
-	  // Step 6a: Structural algorithm
-	  // Inputs from event:
-	  //   * Big (MST) clusters
-	  //   * Track segment clusters
-	  //   * Clump clusters
-	  // Output to event:
-	  //   * List of skeleton clusters
-	  //   * Hitmap containing clustered hits that aren't in skeletons
-	  LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
-	  structural.LikelihoodLinkPlotterDriver likelihoodPlotter = new structural.LikelihoodLinkPlotterDriver(eval, 0.5, 0.6, 0.8, assoc, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
-	  likelihoodPlotter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
-	  likelihoodPlotter.initPlots("likelihoodPerformance.aida");
-	  add(likelihoodPlotter);
-	  // Some likelihood quantities need per-event info:
-	  makeEventInfoList(eval);
-	  // Step 6b: Halo
-	  // Inputs from event:
-	  //   * Skeleton clusters
-	  //   * Hitmap containing clustered hits that aren't in skeletons
-	  // Outputs:
-	  //   * Fleshed-out skeletons (with halo added)
-	  //   * Modified hitmap with any remaining clustered hits
-	  add(new template.HaloAssigner("skeletons", "structural unused hits", "skeletons plus halo", "structural unused hits minus halo"));
-	  // Step 6c: Extrapolate tracks to ECAL surface, form charged particles
-	  // Inputs:
-	  //   * The tracks (which we'll extrapolate)
-	  //   * The MIPs (which are our best chance of a good association)
-	  //   * The fleshed-out skeletons (to match the tracks to -- or to their MIP components -- and form particles)
-	  // Outputs:
-	  //   * Any unused tracks
-	  //   * The reconstructed particles
-	  ChargedHadronIdentifier hadID = new ChargedHadronIdentifier();
-	  hadID.setInputTrackList(EventHeader.TRACKS);
-	  hadID.setOutputTrackList("leftover tracks");
-	  hadID.setInputMIPList("mips");
-	  hadID.setInputClusterList("skeletons plus halo");
-	  hadID.setOutputParticleList("charged hadron particles");
-	  add(hadID);
-	  // Step 6d: Handle fragments
-	  // Inputs:
-	  //    * Charged particles ("charged hadron particles")
-	  //    * Skeleton clusters ("skeletons plus halo")
-	  //    * Small clusters ("mst clusters linked (<10 hits)")
-	  //    * Unassigned structural hits, if any ("structural unused hits minus halo")
-	  // Outputs:
-	  //    * Particles (one per merged cluster)
-	  //    * Clusters after merging in fragments ("merged clusters");
-	  //    * Any remaining unassigned hits("leftover hits after fragment merge")
-          FragmentMergeDriver fragMergeDriver = new FragmentMergeDriver();
-	  fragMergeDriver.addInputClusterList("skeletons plus halo");
-	  fragMergeDriver.addInputClusterList("mst clusters linked (<10 hits)");
-	  fragMergeDriver.addInputHitMap("structural unused hits minus halo");
-	  fragMergeDriver.addInputParticleList("charged hadron particles");
-	  fragMergeDriver.setOutputClusterList("clusters with fragments merged");
-	  fragMergeDriver.setOutputHitMap("hits left over after fragment merge");
-	  SimpleFragmentIdentifier fragID = new SimpleFragmentIdentifier();
-	  fragID.addParticleList("charged hadron particles");
-	  fragMergeDriver.setFragmentIdentifier(fragID);
-	  SimpleFragmentMerger fragMerge = new SimpleFragmentMerger();
-	  fragMergeDriver.setFragmentMerger(fragMerge);
-	  // specify fragment identifier, merger.
-          add(fragMergeDriver);
-	  // Repeat the hadron ID step with the revised cluster list:
-	  ChargedHadronIdentifier hadID2 = new ChargedHadronIdentifier();
-	  hadID2.setInputTrackList(EventHeader.TRACKS);
-	  hadID2.setOutputTrackList("leftover tracks 2");
-	  hadID2.setInputMIPList("mips");
-	  hadID2.setInputClusterList("clusters with fragments merged");
-	  hadID2.setOutputParticleList("charged hadron particles 2");
-	  add(hadID2);
-	  // ... and then any remaining clusters should be neutral
-	  ClusterListFilterDriver removeChargedClusters = new ClusterListFilterDriver();
-	  VetoClustersFromParticles vetoCharged = new VetoClustersFromParticles("charged hadron particles 2");
-	  removeChargedClusters.setInputDecision(vetoCharged);
-	  removeChargedClusters.setInputClusterList("clusters with fragments merged");
-	  removeChargedClusters.setOutputClusterListPass("neutral clusters with fragments merged");
-	  add(vetoCharged);
-	  add(removeChargedClusters);
-	  NeutralHadronIdentifier hadID3 = new NeutralHadronIdentifier();
-	  hadID3.setInputClusterList("neutral clusters with fragments merged");
-	  hadID3.setOutputParticleList("neutral hadron particles");
-	  add(hadID3);
-	  // Merge together all the particle lists:
-	  ListMerger<ReconstructedParticle> mergeParticles = new ListMerger<ReconstructedParticle>();
-	  mergeParticles.addInputList("charged hadron particles 2");
-	  mergeParticles.addInputList("neutral hadron particles");
-	  mergeParticles.setOutputList("hadron particles");
-	  add(mergeParticles);
-	  // Step 6e: Plots
+	    // Step 6a: Structural algorithm
+	    // Inputs from event:
+	    //   * Big (MST) clusters
+	    //   * Track segment clusters
+	    //   * Clump clusters
+	    // Output to event:
+	    //   * List of skeleton clusters
+	    //   * Hitmap containing clustered hits that aren't in skeletons
+	    LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
+	    // Some likelihood quantities need per-event info:
+	    makeEventInfoList(eval);
+	    
+	    //structural.LikelihoodLinkPlotterDriver likelihoodPlotter = new structural.LikelihoodLinkPlotterDriver(eval, 0.5, 0.6, 0.8, assoc, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
+	    //likelihoodPlotter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
+	    //likelihoodPlotter.initPlots("likelihoodPerformance.aida");
+	    //add(likelihoodPlotter);
+
+	    org.lcsim.recon.cluster.structural.LikelihoodLinkDriver likelihoodLinker = null;
+	    if (true) {
+		// cheat
+		likelihoodLinker = new org.lcsim.recon.cluster.structural.CheatLikelihoodLinkDriver("mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
+		String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
+		String[] inputClusterListsForAssociator = {"mips", "clumps"};
+		likelihoodLinker.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
+	    } else {
+		// don't cheat
+		if (false) {
+		    // no plots
+		    likelihoodLinker = new org.lcsim.recon.cluster.structural.LikelihoodLinkDriver(eval, 0.5, 0.5, 0.5, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
+		} else {
+		    // plots
+		    org.lcsim.recon.cluster.structural.LikelihoodLinkPlotDriver plotter = new org.lcsim.recon.cluster.structural.LikelihoodLinkPlotDriver(eval, 0.5, 0.5, 0.5, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
+		    plotter.initPlots("likelihood.aida");
+		    likelihoodLinker = plotter;
+		    String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
+		    String[] inputClusterListsForAssociator = {"mips", "clumps"};
+		    likelihoodLinker.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
+		}
+	    }
+	    likelihoodLinker.setIgnoreClusterDecision(new ClusterSizeDecision(10));
+	    add(likelihoodLinker);
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons", "structural unused hits", "mst clusters linked (<10 hits)", "photon particles" } );
+
+	    // Step 6b: Halo
+	    // Inputs from event:
+	    //   * Skeleton clusters
+	    //   * Hitmap containing clustered hits that aren't in skeletons
+	    // Outputs:
+	    //   * Fleshed-out skeletons (with halo added)
+	    //   * Modified hitmap with any remaining clustered hits
+	    add(new org.lcsim.recon.cluster.structural.HaloAssigner("skeletons", "structural unused hits", "skeletons plus halo", "structural unused hits minus halo"));
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons plus halo", "structural unused hits minus halo", "mst clusters linked (<10 hits)", "photon particles" } );
+
+	    // Step 6c: Extrapolate tracks to ECAL surface, form charged particles
+	    // Inputs:
+	    //   * The tracks (which we'll extrapolate)
+	    //   * The MIPs (which are our best chance of a good association)
+	    //   * The fleshed-out skeletons (to match the tracks to -- or to their MIP components -- and form particles)
+	    // Outputs:
+	    //   * Any unused tracks
+	    //   * The reconstructed particles
+	    SimpleChargedParticleMaker hadID = new SimpleChargedParticleMaker();
+	    hadID.setInputTrackList(EventHeader.TRACKS);
+	    hadID.setOutputTrackList("leftover tracks");
+	    hadID.setInputMIPList("mips");
+	    hadID.setInputClusterList("skeletons plus halo");
+	    hadID.setOutputParticleList("charged hadron particles");
+	    hadID.setDebug(false);
+	    add(hadID);
+
+	    // Step 6d: Handle fragments
+	    // Here are the fragment/small neutrals we've accumulated:
+	    //    * Small clusters ("mst clusters linked (<10 hits)")
+	    //    * Unassigned structural hits, if any ("structural unused hits minus halo")
+	    // Convert the individual hits into clusters:
+	    add(new HitMapToClusterListDriver("structural unused hits minus halo", "structural unused hits minus halo (clusters)"));
+	    // Merge fragments into a single cluster list:
+	    ListAddDriver combineFragmentClusters = new ListAddDriver();
+	    combineFragmentClusters.addInputList("mst clusters linked (<10 hits)");
+	    combineFragmentClusters.addInputList("structural unused hits minus halo (clusters)");
+	    combineFragmentClusters.setOutputList("small clusters");
+	    add(combineFragmentClusters);
+
+	    // Test: Try to figure out which are charged and which are neutral
+	    ChargedNeutralFragmentSeparator testCharged = new ChargedNeutralFragmentSeparator(EventHeader.TRACKS);
+	    testCharged.initPlots();
+	    add(testCharged);
+	    add(new ClusterListFilterDriver(testCharged, "small clusters", "charged small clusters"));
+
+	    // FragmentHandler
+	    // Inputs:
+	    //    * Charged particles ("charged hadron particles")
+	    //    * Skeleton clusters ("skeletons plus halo")
+	    //    * Small clusters    ("small clusters")
+	    // Outputs:
+	    //    * Particles (one per merged cluster)
+	    //    * Clusters after merging in fragments ("merged clusters");
+	    //    * Any remaining unassigned hits("leftover hits after fragment merge")
+	    org.lcsim.recon.cluster.structural.FragmentHandler fragMergeDriver = new org.lcsim.recon.cluster.structural.FragmentHandler();
+	    fragMergeDriver.addInputClusterList("skeletons plus halo");
+	    fragMergeDriver.addInputClusterList("small clusters");
+	    fragMergeDriver.setOutputClusterList("clusters with fragments merged");
+	    fragMergeDriver.setOutputHitMap("hits left over after fragment merge");
+	    org.lcsim.recon.cluster.structural.SimpleFragmentIdentifier fragID = new org.lcsim.recon.cluster.structural.SimpleFragmentIdentifier(10, 100.0);
+	    fragID.addParticleList("charged hadron particles");
+	    fragMergeDriver.setFragmentIdentifier(fragID); // don't cheat
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
+	    // What if we want to cheat? Set it up...
+	    {
+		// Here are our cluster lists:
+		//   "skeletons plus halo"
+		//   "mst clusters linked (<10 hits)"
+		// Here is the MC list:
+		//   "GenFinalStateParticles" [or sim]
+		// Here are the output lists:
+		//   "CheatFragmentIdentifier info P->C"
+		//   "CheatFragmentIdentifier info C->P"
+		ClusterListToHitMapDriver clusterConverter = new ClusterListToHitMapDriver();
+		clusterConverter.addInputList("skeletons plus halo");
+		clusterConverter.addInputList("mst clusters linked (<10 hits)");
+		clusterConverter.setOutputHitMap("HitMap for CheatFragmentIdentifier");
+		add(clusterConverter);
+		add(new HitMapToHitListDriver("HitMap for CheatFragmentIdentifier", "HitList for CheatFragmentIdentifier"));
+		add(new HitMapToHitListDriver("structural unused hits minus halo", "HitList2 for CheatFragmentIdentifier"));
+		String[] hitListNames = { "HitList for CheatFragmentIdentifier", "HitList2 for CheatFragmentIdentifier" } ; // Do we need HitList2?
+		String[] clusterListNames = { "skeletons plus halo", "mst clusters linked (<10 hits)" } ;
+		org.lcsim.recon.cluster.structural.CheatFragmentIdentifier cheatFragID = new org.lcsim.recon.cluster.structural.CheatFragmentIdentifier(hitListNames, clusterListNames, mcListName, "CheatFragmentIdentifier info P->C", "CheatFragmentIdentifier info C->P");
+		fragMergeDriver.setFragmentIdentifier(cheatFragID); // cheat
+	    }
+	    org.lcsim.recon.cluster.structural.SimpleFragmentMerger fragMerge = new org.lcsim.recon.cluster.structural.SimpleFragmentMerger();
+	    fragMergeDriver.setFragmentMerger(fragMerge); // don't cheat
+	    org.lcsim.recon.cluster.structural.CheatFragmentMerger cheatFragMerge = new org.lcsim.recon.cluster.structural.CheatFragmentMerger();
+	    cheatFragMerge.initializeAssociator("TmpHitList", "TmpClusterList", mcListName, "TmpMCCList", "TmpCMCList");
+	    add(cheatFragMerge);
+	    fragMergeDriver.setFragmentMerger(cheatFragMerge); // cheat
+	    add(fragMergeDriver);
+
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
+
+	    // Repeat the hadron ID step with the revised cluster list:
+	    SimpleChargedParticleMaker hadID2 = new SimpleChargedParticleMaker();
+	    hadID2.setInputTrackList(EventHeader.TRACKS);
+	    hadID2.setOutputTrackList("leftover tracks 2");
+	    hadID2.setInputMIPList("mips");
+	    hadID2.setInputClusterList("clusters with fragments merged");
+	    hadID2.setOutputParticleList("charged hadron particles 2");
+	    add(hadID2);
+	    // ... and then any remaining clusters should be neutral
+	    ClusterListFilterDriver removeChargedClusters = new ClusterListFilterDriver();
+	    VetoClustersFromParticles vetoCharged = new VetoClustersFromParticles("charged hadron particles 2");
+	    removeChargedClusters.setInputDecision(vetoCharged);
+	    removeChargedClusters.setInputList("clusters with fragments merged");
+	    removeChargedClusters.setOutputList("neutral clusters with fragments merged");
+	    add(vetoCharged);
+	    add(removeChargedClusters);
+	    //SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(310); // everything is a Ks0...
+	    SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(22); // everything is a photon
+	    hadID3.setInputClusterList("neutral clusters with fragments merged");
+	    hadID3.setOutputParticleList("neutral hadron particles");
+	    add(hadID3);
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "charged hadron particles 2", "neutral hadron particles", "hits left over after fragment merge", "photon particles" } );
+	    // Merge together all the particle lists:
+	    ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+	    mergeParticles.addInputList("charged hadron particles 2");
+	    mergeParticles.addInputList("neutral hadron particles");
+	    mergeParticles.addInputList("photon particles");
+	    mergeParticles.setOutputList("all particles");
+	    add(mergeParticles);
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits left over after fragment merge", "all particles" } );
+	    // Step 6e: Plots
+	    ConfusionPlotter confPlot = new ConfusionPlotter("all particles", mcListName);
+	    
+	    add(confPlot);
+	    //add(new DebugInfoParticleList("all particles"));
+	    add(new EnergySumPlotter("all particles", "test.aida"));
+	    //add(new DebugCheckHitCounts("all particles", "input hit map ecal", "input hit map hcal"));
+	    // Go to work finding the correction for missing energy:
+	    MapToHitMapDriver convertECAL = new MapToHitMapDriver();
+	    MapToHitMapDriver convertHCAL = new MapToHitMapDriver();
+	    convertECAL.setInputMap("input hit map ecal");
+	    convertHCAL.setInputMap("input hit map hcal");
+	    convertECAL.setOutputHitMap("converted ecal");
+	    convertHCAL.setOutputHitMap("converted hcal");
+	    add(convertECAL);
+	    add(convertHCAL);
+	    HitMapAddDriver adder = new HitMapAddDriver();
+	    adder.addInputHitMap("converted ecal");
+	    adder.addInputHitMap("converted hcal");
+	    adder.setOutputHitMap("converted all");
+	    add(adder);
+
+	    // Make plots:
+	    CorrectedEnergySumPlotter recoPlotter = new CorrectedEnergySumPlotter("converted all", "all particles", mcListName, "reco-corrected.aida");
+	    add(recoPlotter);
+
+            // Make plots with Ron's ClusterAnalysis routine:
+            // all particles = "charged hadron particles 2" + "neutral hadron particles" + "photon particles"
+            add(new ParticleListToClusterListDriver("charged hadron particles 2", "clusters of charged hadron particles 2"));
+            add(new ParticleListToClusterListDriver("neutral hadron particles", "clusters of neutral hadron particles"));
+            add(new ParticleListToClusterListDriver("photon particles", "clusters of photon particles"));
+            String[] MSTClusternames1 = {"clusters of charged hadron particles 2", "clusters of neutral hadron particles", "clusters of photon particles"};
+            String[] hitcollnames1 = {"EcalBarrDigiHits","EcalEndcapDigiHits","HcalBarrDigiHits","HcalEndcapDigiHits"};
+            add(new ClusterAnalysisDriver(MSTClusternames1,hitcollnames1, mcListName, "MatPlots"));
+
 	}
 
 	/*
-	add(new DebugInfoHitMap("input hit map ecal"));
-	add(new DebugInfoHitMap("input hit map hcal"));
-	add(new DebugInfoHitMap("hit map ecal without mips"));
-	add(new DebugInfoHitMap("hit map hcal without mips"));
-	add(new DebugInfoHitMap("hit map ecal without mips or clumps"));
-	add(new DebugInfoHitMap("hit map hcal without mips or clumps"));
-	add(new DebugInfoHitMap("ecal hit map after mst"));
-	add(new DebugInfoHitMap("hcal hit map after mst"));
-	add(new DebugInfoHitMap("hits from mst clusters linked (>=10 hits)"));
-	add(new DebugInfoHitMap("hits from mst clusters linked (<10 hits)"));
-	add(new DebugInfoHitMap("structural unused hits"));
-	add(new DebugInfoHitMap("structural unused hits minus halo"));
-	add(new DebugInfoHitMap("hits left over after fragment merge"));
-
-	add(new DebugInfoClusterList("mips ecal"));
-	add(new DebugInfoClusterList("mips hcal"));
-	add(new DebugInfoClusterList("mips"));
-	add(new DebugInfoClusterList("clumps ecal"));
-	add(new DebugInfoClusterList("clumps hcal"));
-	add(new DebugInfoClusterList("clumps"));
-	add(new DebugInfoClusterList("mst clusters ecal"));
-	add(new DebugInfoClusterList("mst clusters hcal"));
-	add(new DebugInfoClusterList("mst clusters linked"));
-	add(new DebugInfoClusterList("mst clusters linked (>=10 hits)"));
-	add(new DebugInfoClusterList("mst clusters linked (<10 hits)"));
-	add(new DebugInfoClusterList("skeletons"));
-	add(new DebugInfoClusterList("skeletons plus halo"));
-	add(new DebugInfoClusterList("clusters with fragments merged"));
-	add(new DebugInfoClusterList("neutral clusters with fragments merged"));
-
-	add(new DebugInfoTrackList(EventHeader.TRACKS));
-	add(new DebugInfoTrackList("leftover tracks"));
-	add(new DebugInfoTrackList("leftover tracks 2"));
+
+	// Here's another thing to look at: Perfect PFA
+	
+	// Cluster the hits (perfect pattern recognition)
+	PerfectClusterer clusterer = new PerfectClusterer();
+	clusterer.setInputHitMap("digi hitmap");
+	clusterer.setOutputHitMap("perfect leftover hits");
+	clusterer.setOutputClusterList("perfect clusters");
+	clusterer.setMCParticleList(mcList);
+	add(clusterer);
+	// ID the clusters and create reconstructed particles
+	PerfectIdentifier id = new PerfectIdentifier();
+	id.setInputClusterList("perfect clusters");
+	id.setOutputParticleList("perfect particles");
+	id.setMCParticleList(mcList);
+	id.setInputTrackList(EventHeader.TRACKS);
+	add(id);
 	
-	add(new DebugInfoParticleList("charged hadron particles"));
-	add(new DebugInfoParticleList("charged hadron particles 2"));
-	add(new DebugInfoParticleList("neutral hadron particles"));
 	*/
-	add(new DebugInfoParticleList("hadron particles"));
-	add(new EnergySumPlotter("hadron particles", "test.aida"));
-
-	// Go to work finding the correction for missing energy:
-	hitmap.MapToHitMapDriver convertECAL = new hitmap.MapToHitMapDriver();
-	hitmap.MapToHitMapDriver convertHCAL = new hitmap.MapToHitMapDriver();
-	convertECAL.setInputMap("input hit map ecal");
-	convertHCAL.setInputMap("input hit map hcal");
-	convertECAL.setOutputHitMap("converted ecal");
-	convertHCAL.setOutputHitMap("converted hcal");
-	add(convertECAL);
-	add(convertHCAL);
-	hitmap.HitMapAddDriver adder = new hitmap.HitMapAddDriver();
-	adder.addInputHitMap("converted ecal");
-	adder.addInputHitMap("converted hcal");
-	adder.setOutputHitMap("converted all");
-	add(adder);
-	// Set up the MC list
-	CreateFinalStateMCParticleList mcListMaker = new CreateFinalStateMCParticleList("Gen");
-	add(mcListMaker);
-	// Make plots:
-	add(new CorrectedEnergySumPlotter("converted all", "hadron particles", "GenFinalStateParticles", "reco-corrected.aida"));
+	// Make sure the book-keeping is OK:
+	add(accountant);
     }
 
     public void process(EventHeader event) {
@@ -291,7 +439,47 @@
                 quant.setEventInfo(event);
             }
         }
-        super.process(event);
+
+	// This is a clumsy way of doing a veto.
+	// Require >= 84 GeV in barrel:
+        if (checkEnergyBarrel(event, 0.923)) {
+	    super.process(event);
+	}
+    }
+
+    public void suspend() {
+	try {
+	    System.out.println("SUSPEND: Trying to save Ron's plots...");
+	    aida.saveAs("/tmp/ron.aida");
+	} catch (java.io.IOException x) {
+	    System.out.println("Warning: "+x);
+	}
+	super.suspend();
+    }
+
+    protected boolean checkEnergyBarrel(EventHeader event, double threshold) 
+    {
+        // Require that threshold (e.g. 90%) of the final-state particles are within the barrel ( cos(theta) < 0.8 )    
+        double energySumBarrel = 0.0;
+        double energySumNonBarrel = 0.0;
+        List<MCParticle> mcps = event.getMCParticles();
+        //List<MCParticle> mcps = event.get(MCParticle.class, "GenFinalStateParticles");
+        for (MCParticle mcp : mcps) {
+            if (mcp.getGeneratorStatus() == mcp.FINAL_STATE) {
+                Hep3Vector momentum = mcp.getMomentum();
+                double cosTheta = momentum.z() / momentum.magnitude();
+                if (Math.abs(cosTheta) < 0.8) {
+                    // barrel
+                    energySumBarrel += mcp.getEnergy();
+                } else {
+                    // non-barrel
+                    energySumNonBarrel += mcp.getEnergy();
+                }
+            }
+        }
+        double energySumTotal = energySumBarrel + energySumNonBarrel;
+	//System.out.println("DEBUG: Energy in barrel is "+energySumBarrel+"/"+energySumTotal+" = "+(energySumBarrel/energySumTotal));
+        return (energySumBarrel / energySumTotal > threshold);
     }
 
 
CVSspam 0.2.8