Commit in lcsim/src/org/lcsim/contrib/uiowa/template on MAIN
RonSnapshotPFA.java+684added 1.1
RonSnapshotPFAWrapper.java+28added 1.1
RonSnapshotPFAWrite.java+9added 1.1
+721
3 added files
snapshot of PFA for Ron

lcsim/src/org/lcsim/contrib/uiowa/template
RonSnapshotPFA.java added at 1.1
diff -N RonSnapshotPFA.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ RonSnapshotPFA.java	24 Oct 2006 17:48:32 -0000	1.1
@@ -0,0 +1,684 @@
+import java.util.*; 
+import hep.physics.vec.*;
+
+import org.lcsim.util.*;
+import org.lcsim.util.decision.*;
+import org.lcsim.util.hitmap.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+import org.lcsim.recon.cluster.mst.*;
+import org.lcsim.recon.cluster.mipfinder.*;
+import org.lcsim.recon.cluster.clumpfinder.*;
+import org.lcsim.recon.cluster.structural.likelihood.*;
+import org.lcsim.digisim.DigiSimDriver;
+import org.lcsim.recon.cluster.util.CalHitMapDriver;
+import org.lcsim.digisim.SimCalorimeterHitsDriver;
+import org.lcsim.recon.cluster.util.ClusterSizeDecision;
+import org.lcsim.recon.cluster.analysis.*;
+import org.lcsim.event.util.CreateFinalStateMCParticleList;
+import org.lcsim.mc.fast.tracking.MCFastTracking;
+import org.lcsim.recon.pfa.identifier.SimpleNeutralParticleMaker;
+import org.lcsim.recon.pfa.identifier.SimpleChargedParticleMaker;
+import org.lcsim.recon.pfa.identifier.MIPChargedParticleMaker;
+import org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher;
+import org.lcsim.recon.pfa.identifier.SimpleTrackMIPClusterMatcher;
+import org.lcsim.recon.pfa.identifier.TrackClusterMatcher;
+import org.lcsim.recon.cluster.util.BothCalorimetersDecision;
+import org.lcsim.recon.cluster.util.ClusterListFilterDriver;
+import org.lcsim.recon.cluster.util.VetoClustersFromParticles;
+import org.lcsim.recon.cluster.util.ParticleListToClusterListDriver;
+import org.lcsim.recon.pfa.output.EnergySumPlotter;
+import org.lcsim.recon.pfa.output.CorrectedEnergySumPlotter;
+import org.lcsim.recon.pfa.output.ConfusionPlotter;
+import org.lcsim.util.decision.ListFilterDriver;
+import org.lcsim.util.decision.ParticlePDGDecision;
+import org.lcsim.recon.cluster.cheat.PerfectClusterer;
+import org.lcsim.recon.cluster.structural.ChargedNeutralFragmentSeparator;
+import org.lcsim.recon.cluster.util.ClusterFirstLayerDecision;
+import org.lcsim.recon.pfa.structural.HitBookKeeper;
+
+//import org.lcsim.util.aida.AIDA;
+
+public class RonSnapshotPFA extends Driver
+{
+    // I think this is needed for Ron's analysis:
+    // private AIDA aida = AIDA.defaultInstance();
+
+    // Default is to read likelihood info in and use it to
+    // reconstruct clusters. Alternative is to create new
+    // likelihood output.
    public RonSnapshotPFA() {
	// false => read likelihood info from file and reconstruct,
	// rather than writing new likelihood PDFs.
	this(false);
    }
+
+    public RonSnapshotPFA(boolean writeLikelihood)
+    {
+	// Book-keeping
+	HitBookKeeper accountant = new HitBookKeeper();
+	accountant.setDebug(false);
+	//accountant.setDebug(true);
+
+	// Set up the MC list
+	//
+	// Shift this bit into the process() routine:
+	CreateFinalStateMCParticleList mcListMakerGen = new CreateFinalStateMCParticleList("Gen");
+	CreateFinalStateMCParticleList mcListMakerSim = new CreateFinalStateMCParticleList("Sim");
+	add(mcListMakerGen);
+	add(mcListMakerSim);
+	String mcListNameGen = "GenFinalStateParticles";
+	String mcListNameSim = "SimFinalStateParticles";
+	String mcListName = mcListNameGen;
+
+	// Step 0: Run digisim
+
+	// CalHitMapDriver is needed by DigiSim
+	add(new CalHitMapDriver());
+	// DigiSim: SimCalHits -> RawCalHits
+	DigiSimDriver digi = new org.lcsim.digisim.DigiSimDriver();
+	add(digi);
+	// RawCalHits -> SimCalorimeterHits
+	add( new SimCalorimeterHitsDriver() );
+
+	// Step 1: Set up input hit lists:
+	{
+	    HitListToHitMapDriver hitmapEcal = new HitListToHitMapDriver();
+	    hitmapEcal.addInputList("EcalBarrDigiHits");
+	    hitmapEcal.addInputList("EcalEndcapDigiHits");
+	    hitmapEcal.setOutput("input hit map ecal");
+	    HitListToHitMapDriver hitmapHcal = new HitListToHitMapDriver();
+	    hitmapHcal.addInputList("HcalBarrDigiHits");
+	    hitmapHcal.addInputList("HcalEndcapDigiHits");
+	    hitmapHcal.setOutput("input hit map hcal");
+	    add(hitmapEcal);
+	    add(hitmapHcal);
+	}
+	accountant.addListOfNamedLists( new String[] { "input hit map ecal", "input hit map hcal" } );
+
+	// Find tracks
+	// Non-cheating: Output: List<Track> saved as EventHeader.TRACKS
+	add (new MCFastTracking());
+	// Cheating: Output: CombinedTracks (type CheatTrack)
+	org.lcsim.recon.ztracking.cheater.TrackingCheater trackCheater = new org.lcsim.recon.ztracking.cheater.TrackingCheater();
+	trackCheater.setUseFinalStateParticles(true);
+	add(trackCheater);
+
+	String nonCheatingTrackList = EventHeader.TRACKS;
+	String cheatingTrackList = new String("CombinedTracks");
+	//String trackList = nonCheatingTrackList;
+	String trackList = cheatingTrackList;
+
+
+	// Finding photons and MIPs needs to be done carefully, since
+	// many photons look like MIPs and vice-versa.
+	// For now we do it in chunks.
+	
+	// Find MIP candidates in ECAL, starting near the front:
+	{
+	    TrackClusterDriver ecalFrontSideMIPs = new TrackClusterDriver("input hit map ecal", "front-side mips ecal", "hit map ecal without front-side mips");
+	    ecalFrontSideMIPs.filterOutputClusters(new ClusterFirstLayerDecision(4));
+	    add(ecalFrontSideMIPs);
+	}
+
+	// Check if they match a track (ugly... don't need to make the particles!)
+	{
+	    MIPChargedParticleMaker mipHadID = new MIPChargedParticleMaker();
+	    SimpleTrackMIPClusterMatcher mipMatch = new SimpleTrackMIPClusterMatcher();
+	    add(mipMatch);
+	    mipHadID.setTrackMatcher(mipMatch);
+	    mipHadID.setInputTrackList(trackList);
+	    mipHadID.setOutputTrackList("tracks left over from front-side mips");
+	    mipHadID.setInputMIPList("front-side mips ecal");
+	    mipHadID.setOutputMIPList("mips minus front-side mips ecal");
+	    mipHadID.setOutputParticleList("front-side mip particles");
+	    add(mipHadID);
+	}
+
+	// Isolate the genuine (charged) MIPs from the rest
+	{
+	    ClusterListFilterDriver filterRemoveChargedMIPs = new ClusterListFilterDriver();
+	    ClusterListFilterDriver filterSelectChargedMIPs = new ClusterListFilterDriver();
+	    VetoClustersFromParticles vetoChargedMIPs = new VetoClustersFromParticles("front-side mip particles"); // veto MIPs
+	    DecisionMakerSingle<Cluster> selectChargedMIPs = new NotDecisionMakerSingle<Cluster> (vetoChargedMIPs); // invert veto to select MIPs
+	    filterRemoveChargedMIPs.setInputDecision(vetoChargedMIPs);
+	    filterSelectChargedMIPs.setInputDecision(selectChargedMIPs);
+	    filterRemoveChargedMIPs.setInputList("front-side mips ecal");
+	    filterSelectChargedMIPs.setInputList("front-side mips ecal");
+	    filterRemoveChargedMIPs.setOutputList("neutral front-side mips ecal");
+	    filterSelectChargedMIPs.setOutputList("charged front-side mips ecal");
+	    add(vetoChargedMIPs);
+	    add(filterRemoveChargedMIPs);
+	    add(filterSelectChargedMIPs);
+	}
+	// Merge the non-charged "MIP" hits back in
+	{
+	    ClusterListToHitMapDriver convertFakeMIPsToHitMap = new ClusterListToHitMapDriver("neutral front-side mips ecal", "hit map ecal of neutral front-side mips");
+	    add(convertFakeMIPsToHitMap);
+	    HitMapAddDriver addFakeMIPHitsBack = new HitMapAddDriver();
+	    addFakeMIPHitsBack.addInputHitMap("hit map ecal of neutral front-side mips");
+	    addFakeMIPHitsBack.addInputHitMap("hit map ecal without front-side mips");
+	    addFakeMIPHitsBack.setOutputHitMap("hit map ecal without charged front-side mips");
+	    add(addFakeMIPHitsBack);
+	}
+	
+	// Find photons
+	{
+	    MSTPhotonFinderDriver photonFinder = new MSTPhotonFinderDriver();
+	    photonFinder.setCoreThreshold(7.5);
+	    photonFinder.setFragmentThreshold(7.5);
+	    photonFinder.setLayerProximityThreshold(2);
+	    photonFinder.setCoreSizeMinimum(10);
+	    photonFinder.setFragmentSizeMaximum(6);
+	    photonFinder.setInputHitMap("hit map ecal without charged front-side mips");
+	    photonFinder.setOutputHitMap("hit map ecal without photons or charged front-side mips");
+	    photonFinder.setOutputClusterList("photon clusters");
+	    //photonFinder.setDebug(true);
+	    add(photonFinder);
+	}
+
+	// Merge the charged MIP hits back in
+	{
+	    ClusterListToHitMapDriver convertRealMIPsToHitMap = new ClusterListToHitMapDriver("charged front-side mips ecal", "hit map ecal of charged front-side mips");
+	    add(convertRealMIPsToHitMap);
+	    HitMapAddDriver addRealMIPHitsBack = new HitMapAddDriver();
+	    addRealMIPHitsBack.addInputHitMap("hit map ecal of charged front-side mips");
+	    addRealMIPHitsBack.addInputHitMap("hit map ecal without photons or charged front-side mips");
+	    addRealMIPHitsBack.setOutputHitMap("hit map ecal without photons");
+	    add(addRealMIPHitsBack);
+	}
+	
+	// Find track segments in ECAL and HCAL
+	{
+	    TrackClusterDriver ecalMIP = new TrackClusterDriver("hit map ecal without photons", "mips ecal", "hit map ecal without mips or photons");
+	    TrackClusterDriver hcalMIP = new TrackClusterDriver("input hit map hcal", "mips hcal", "hit map hcal without mips");
+	    add(ecalMIP);
+	    add(hcalMIP);
+	}
+        // Merge the two lists:
+	{
+	    ListAddDriver<Cluster> mergeMIPs = new ListAddDriver<Cluster>();
+	    mergeMIPs.addInputList("mips ecal");
+	    mergeMIPs.addInputList("mips hcal");
+	    mergeMIPs.setOutputList("mips");
+	    add(mergeMIPs);
+	    accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips ecal", "mips hcal", "photon clusters" } );
+	    accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips", "photon clusters" } );
+	}
+
+	//// Find photons in the ECAL (cheat for now)
+	//add(new ListFilterDriver(new ParticlePDGDecision(22), mcListName, "MCParticles photons only"));
+	//PerfectClusterer myCheatPhotonFinder = new PerfectClusterer();
+	//myCheatPhotonFinder.setInputHitMap("hit map ecal without mips");
+	//myCheatPhotonFinder.setOutputHitMap("hit map ecal without mips or photons");
+	//myCheatPhotonFinder.setOutputClusterList("photon clusters");
+	//myCheatPhotonFinder.setMCParticleList("MCParticles photons only");
+	//myCheatPhotonFinder.allowHitSharing(false);
+	//add(myCheatPhotonFinder);
+	//accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips ecal", "mips hcal", "photon clusters" } );
+
+	// ID photons
+	{
+	    SimpleNeutralParticleMaker myPhotonIdentifier = new SimpleNeutralParticleMaker(22); // 22 = photon
+	    myPhotonIdentifier.setInputClusterList("photon clusters");
+	    myPhotonIdentifier.setOutputParticleList("photon particles");
+	    add(myPhotonIdentifier);
+	    accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips ecal", "mips hcal", "photon particles" } );
+	}
+
+	// Find clumps in ECAL and HCAL
+	// Clumps are defined as contiguous sets of
+	// 6+ hits where the local hit density (in a 5x5x3 block) is 7/75
+	// or more for each hit. Track segment hits
+	// are NOT used.
+	ClumpFinder findClumpsECAL = new ClumpFinder("hit map ecal without mips or photons", "clumps ecal", "hit map ecal without mips or photons or clumps");
+	ClumpFinder findClumpsHCAL = new ClumpFinder("hit map hcal without mips", "clumps hcal", "hit map hcal without mips or clumps");
+        add(findClumpsECAL);
+        add(findClumpsHCAL);
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "photon particles", "clumps ecal", "clumps hcal" } );
+	ListAddDriver<Cluster> mergeClumps = new ListAddDriver<Cluster>();
+	mergeClumps.addInputList("clumps ecal");
+        mergeClumps.addInputList("clumps hcal");
+        mergeClumps.setOutputList("clumps");
+	add(mergeClumps);
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "photon particles", "clumps" } );
+
+	// Step 3: Find large-scale hadronic clusters with the MST
+	// Output: List<Cluster>
+	//NewMSTDriver mstEcal = new NewMSTDriver("ecal hit map after mst", "mst clusters ecal");
+	//NewMSTDriver mstHcal = new NewMSTDriver("hcal hit map after mst", "mst clusters hcal");
+	MSTClusterDriver mstEcal = new MSTClusterDriver("ecal hit map after mst", "mst clusters ecal");
+	MSTClusterDriver mstHcal = new MSTClusterDriver("hcal hit map after mst", "mst clusters hcal");
+	mstEcal.addInputHitMap("hit map ecal without mips or photons or clumps");
+	mstHcal.addInputHitMap("hit map hcal without mips or clumps");
+	mstEcal.addUserInputList("mips ecal");
+	mstHcal.addUserInputList("mips hcal");
+	mstEcal.addUserInputList("clumps ecal");
+	mstHcal.addUserInputList("clumps hcal");
+	mstEcal.setThreshold(30.0);
+	mstHcal.setThreshold(100.0);
+        mstEcal.registerMetrics(new MinimumHitToHitDistance());
+        mstHcal.registerMetrics(new MinimumHitToHitDistance());
+	add (mstEcal);
+	add (mstHcal);
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters ecal", "mst clusters hcal", "photon particles" } );
+
+	// Step 4: Link across the ECAL-HCAL boundary
+	// Input: List<Cluster> for ECAL
+	// Input: List<Cluster> for HCAL
+	// Output: List<Cluster>
+        MSTClusterDriver mstDriverLink = new MSTClusterDriver("User");
+        mstDriverLink.registerMetrics(new MinimumHitToHitDistance());
+        mstDriverLink.setThreshold(50.0); // 5cm
+        mstDriverLink.addUserInputList("mst clusters ecal");
+        mstDriverLink.addUserInputList("mst clusters hcal");
+        mstDriverLink.setClusterName("mst clusters linked");
+	mstDriverLink.setPairDecision(new BothCalorimetersDecision());
+        add(mstDriverLink);
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked", "photon particles" } );
+
+	// OK. Now we are going to weed out the small (<10 hit) clusters.
+	ClusterListFilterDriver sizeFilterDriver = new ClusterListFilterDriver();
+	sizeFilterDriver.setInputDecision(new ClusterSizeDecision(10));
+	sizeFilterDriver.setInputList("mst clusters linked");
+	sizeFilterDriver.setOutputList("mst clusters linked (>=10 hits)");
+	sizeFilterDriver.setOutputClusterListFail("mst clusters linked (<10 hits)");
+	sizeFilterDriver.setOutputHitMapPass("hits from mst clusters linked (>=10 hits)");
+	sizeFilterDriver.setOutputHitMapFail("hits from mst clusters linked (<10 hits)");
+	add(sizeFilterDriver);
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "photon particles" } );
+
+	if (writeLikelihood) {
+	    //   Step 6a: Make likelihood PDFs
+	    LikelihoodEvaluator eval = new LikelihoodEvaluator();
+	    eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackDOCA(), 50, 0.0, 100.0, false, true);
+	    eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackPOCAInCalorimeter(), 2, -0.5, 1.5, false, false);
+	    eval.addLikelihoodQuantityTrackToTrack(new TrackToTrackSmallestDistanceToPOCA(), 25, 0.0, 250.0, false, true);
+	    eval.addLikelihoodQuantityTrackToClump(new TrackToClumpDOCA(), 50, 0.0, 300.0, false, true);
+	    eval.addLikelihoodQuantityTrackToClump(new ClusterToClusterMinDistance(), 25, 0.0, 250.0, false, true);
+	    eval.addLikelihoodQuantityClumpToClump(new ClumpToClumpDOCA(), 20, 0.0, 200.0, false, true);
+	    eval.addLikelihoodQuantityClumpToClump(new ClusterToClusterMinDistance(), 20, 0.0, 200.0, false, true);
+	    // Handle things that have per-event info:
+	    makeEventInfoList(eval); 
+	    org.lcsim.recon.cluster.structural.LikelihoodFindingStructuralDriver likelihoodWriter = new org.lcsim.recon.cluster.structural.LikelihoodFindingStructuralDriver(eval, "mst clusters linked (>=10 hits)", "mips", "clumps");
+	    String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
+	    String[] inputClusterListsForAssociator = {"mips", "clumps"};
+	    likelihoodWriter.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
+	    add(likelihoodWriter);
+	    LikelihoodEvaluatorCheckpointDriver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
+	    checkpoint.setVerbose(true);
+	    add(checkpoint);
+	} else {
+
+	    // Step 6a: Structural algorithm
+	    // Inputs from event:
+	    //   * Big (MST) clusters
+	    //   * Track segment clusters
+	    //   * Clump clusters
+	    // Output to event:
+	    //   * List of skeleton clusters
+	    //   * Hitmap containing clustered hits that aren't in skeletons
+	    LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
+	    // Some likelihood quantities need per-event info:
+	    makeEventInfoList(eval);
+	    
+	    // Set up linker:
+	    org.lcsim.recon.cluster.structural.LikelihoodLinkDriver likelihoodLinker = null;
+	    boolean cheatOnLinker = false;
+	    boolean makePlotsWhenNotCheating = false;
+	    if (cheatOnLinker) {
+		// cheat
+		likelihoodLinker = new org.lcsim.recon.cluster.structural.CheatLikelihoodLinkDriver("mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
+		String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
+		String[] inputClusterListsForAssociator = {"mips", "clumps"};
+		likelihoodLinker.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
+	    } else {
+		// don't cheat
+		if (!makePlotsWhenNotCheating) {
+		    // no plots
+		    likelihoodLinker = new org.lcsim.recon.cluster.structural.LikelihoodLinkDriver(eval, 0.5, 0.5, 0.5, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
+		} else {
+		    // plots
+		    org.lcsim.recon.cluster.structural.LikelihoodLinkPlotDriver plotter = new org.lcsim.recon.cluster.structural.LikelihoodLinkPlotDriver(eval, 0.5, 0.5, 0.5, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
+		    plotter.initPlots("likelihood.aida");
+		    likelihoodLinker = plotter;
+		    String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
+		    String[] inputClusterListsForAssociator = {"mips", "clumps"};
+		    likelihoodLinker.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
+		}
+	    }
+	    // finish setting up linker
+	    likelihoodLinker.setIgnoreClusterDecision(new ClusterSizeDecision(10));
+	    add(likelihoodLinker);
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons", "structural unused hits", "mst clusters linked (<10 hits)", "photon particles" } );
+
+	    // Step 6b: Halo
+	    // Inputs from event:
+	    //   * Skeleton clusters
+	    //   * Hitmap containing clustered hits that aren't in skeletons
+	    // Outputs:
+	    //   * Fleshed-out skeletons (with halo added)
+	    //   * Modified hitmap with any remaining clustered hits
+	    add(new org.lcsim.recon.cluster.structural.HaloAssigner("skeletons", "structural unused hits", "skeletons plus halo", "structural unused hits minus halo"));
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons plus halo", "structural unused hits minus halo", "mst clusters linked (<10 hits)", "photon particles" } );
+
+	    // Step 6c: Extrapolate tracks to ECAL surface, form charged particles
+
+	    // First try the MIPs...
+	    {
+		MIPChargedParticleMaker hadIDmip = new MIPChargedParticleMaker();
+		SimpleTrackMIPClusterMatcher mipMatch = new SimpleTrackMIPClusterMatcher();
+		add(mipMatch);
+		hadIDmip.setTrackMatcher(mipMatch);
+		hadIDmip.setInputTrackList(trackList);
+		hadIDmip.setOutputTrackList("tracks minus mip associations");
+		hadIDmip.setInputMIPList("mips");
+		hadIDmip.setOutputParticleList("charged hadron particles with mip association");
+		hadIDmip.setOutputMIPList("unmatched mips");
+		hadIDmip.addClusterList("skeletons plus halo", "skeletons plus halo minus charged particles from mips");
+		hadIDmip.addClusterList("mst clusters linked (<10 hits)", "mst clusters linked (<10 hits) minus charged particles from mips");
+		add(hadIDmip);
+	    }
+
+	    // Then try the clusters generically:
+	    {
+		SimpleChargedParticleMaker hadID = new SimpleChargedParticleMaker();
+		SimpleTrackClusterMatcher clusMatch = new SimpleTrackClusterMatcher();
+		add(clusMatch);
+		hadID.setTrackMatcher(clusMatch);
+		hadID.setInputTrackList("tracks minus mip associations");
+		hadID.setOutputTrackList("leftover tracks");
+		hadID.setInputClusterList("skeletons plus halo minus charged particles from mips");
+		hadID.setOutputParticleList("charged hadron particles with non-mip association");
+		add(hadID);
+	    }
+
+	    // Merge the two particle lists:
+	    {
+		ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+		mergeParticles.addInputList("charged hadron particles with mip association");
+		mergeParticles.addInputList("charged hadron particles with non-mip association");
+		mergeParticles.setOutputList("charged hadron particles");
+		add(mergeParticles);
+	    }
+
+	    // Step 6d: Handle fragments
+
+	    org.lcsim.recon.cluster.structural.FragmentHandler fragMergeDriver = new org.lcsim.recon.cluster.structural.FragmentHandler();
+	    fragMergeDriver.addInputClusterList("skeletons plus halo");
+	    fragMergeDriver.addInputClusterList("mst clusters linked (<10 hits)");
+	    fragMergeDriver.setOutputClusterList("clusters with fragments merged");
+	    fragMergeDriver.setOutputHitMap("hits left over after fragment merge");
+	    org.lcsim.recon.cluster.structural.SimpleFragmentIdentifier fragID = new org.lcsim.recon.cluster.structural.SimpleFragmentIdentifier(10, 100.0);
+	    fragID.addParticleList("charged hadron particles");
+	    fragMergeDriver.setFragmentIdentifier(fragID); // don't cheat
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
+
+	    boolean cheatOnFragmentMerge = false;
+	    if (cheatOnFragmentMerge) {
+		// Here are our cluster lists:
+		//   "skeletons plus halo plus orphan mips"
+		//   "mst clusters linked (<10 hits) minus orphan mips"
+		// Here is the MC list:
+		//   "GenFinalStateParticles" [or sim]
+		// Here are the output lists:
+		//   "CheatFragmentIdentifier info P->C"
+		//   "CheatFragmentIdentifier info C->P"
+		ClusterListToHitMapDriver clusterConverter = new ClusterListToHitMapDriver();
+		clusterConverter.addInputList("skeletons plus halo");
+		clusterConverter.addInputList("mst clusters linked (<10 hits)");
+		clusterConverter.setOutputHitMap("HitMap for CheatFragmentIdentifier");
+		add(clusterConverter);
+		add(new HitMapToHitListDriver("HitMap for CheatFragmentIdentifier", "HitList for CheatFragmentIdentifier"));
+		add(new HitMapToHitListDriver("structural unused hits minus halo", "HitList2 for CheatFragmentIdentifier"));
+		String[] hitListNames = { "HitList for CheatFragmentIdentifier", "HitList2 for CheatFragmentIdentifier" } ; // Do we need HitList2?
+		String[] clusterListNames = { "skeletons plus halo", "mst clusters linked (<10 hits)" } ;
+		org.lcsim.recon.cluster.structural.CheatFragmentIdentifier cheatFragID = new org.lcsim.recon.cluster.structural.CheatFragmentIdentifier(hitListNames, clusterListNames, mcListName, "CheatFragmentIdentifier info P->C", "CheatFragmentIdentifier info C->P");
+		fragMergeDriver.setFragmentIdentifier(cheatFragID); // cheat
+		org.lcsim.recon.cluster.structural.CheatFragmentMerger cheatFragMerge = new org.lcsim.recon.cluster.structural.CheatFragmentMerger();
+		cheatFragMerge.initializeAssociator("TmpHitList", "TmpClusterList", mcListName, "TmpMCCList", "TmpCMCList");
+		fragMergeDriver.setFragmentMerger(cheatFragMerge); // cheat [broken?]
+		add(cheatFragMerge);
+	    } else {
+		org.lcsim.recon.cluster.structural.SimpleFragmentMerger fragMerge = new org.lcsim.recon.cluster.structural.SimpleFragmentMerger();
+		fragMergeDriver.setFragmentMerger(fragMerge); // don't cheat
+		add(fragMergeDriver);
+	    }
+
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
+
+	    // Repeat the hadron ID step with the revised cluster list:
+
+	    // First try the MIPs...
+	    {
+		MIPChargedParticleMaker hadIDmip2 = new MIPChargedParticleMaker();
+		SimpleTrackMIPClusterMatcher mipMatch = new SimpleTrackMIPClusterMatcher();
+		add(mipMatch);
+		hadIDmip2.setTrackMatcher(mipMatch);
+		hadIDmip2.setInputTrackList(trackList);
+		hadIDmip2.setOutputTrackList("tracks minus mip associations 2");
+		hadIDmip2.setInputMIPList("mips");
+		hadIDmip2.setOutputParticleList("charged hadron particles with mip association 2");
+		hadIDmip2.setOutputMIPList("unmatched mips 2");
+		hadIDmip2.addClusterList("clusters with fragments merged", "clusters with fragments merged minus charged particles from mips");
+		add(hadIDmip2);
+	    }
+
+	    // Then try the clusters generically:
+	    {
+		SimpleChargedParticleMaker hadID2 = new SimpleChargedParticleMaker();
+		SimpleTrackClusterMatcher clusMatch = new SimpleTrackClusterMatcher();
+		add(clusMatch);
+		hadID2.setTrackMatcher(clusMatch);
+		hadID2.setInputTrackList("tracks minus mip associations 2");
+		hadID2.setOutputTrackList("leftover tracks 2");
+		hadID2.setInputClusterList("clusters with fragments merged minus charged particles from mips");
+		hadID2.setOutputParticleList("charged hadron particles with non-mip association 2");
+		add(hadID2);
+	    }
+
+	    // Merge the two particle lists:
+	    {
+		ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+		mergeParticles.addInputList("charged hadron particles with mip association 2");
+		mergeParticles.addInputList("charged hadron particles with non-mip association 2");
+		mergeParticles.setOutputList("charged hadron particles 2");
+		add(mergeParticles);
+	    }
+
+	    // ... and then any remaining clusters should be neutral
+	    ClusterListFilterDriver removeChargedClusters = new ClusterListFilterDriver();
+	    VetoClustersFromParticles vetoCharged = new VetoClustersFromParticles("charged hadron particles 2");
+	    removeChargedClusters.setInputDecision(vetoCharged);
+	    removeChargedClusters.setInputList("clusters with fragments merged");
+	    removeChargedClusters.setOutputList("neutral clusters with fragments merged");
+	    add(vetoCharged);
+	    add(removeChargedClusters);
+	    //SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(310); // everything is a Ks0...
+	    SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(22); // everything is a photon
+	    hadID3.setInputClusterList("neutral clusters with fragments merged");
+	    hadID3.setOutputParticleList("neutral hadron particles");
+	    add(hadID3);
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "charged hadron particles 2", "neutral hadron particles", "hits left over after fragment merge", "photon particles" } );
+	    // Merge together all the particle lists:
+	    ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+	    mergeParticles.addInputList("charged hadron particles 2");
+	    mergeParticles.addInputList("neutral hadron particles");
+	    mergeParticles.addInputList("photon particles");
+	    mergeParticles.setOutputList("all particles");
+	    add(mergeParticles);
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits left over after fragment merge", "all particles" } );
+	    // Step 6e: Plots
+	    //ConfusionPlotter confPlot = new ConfusionPlotter("all particles", mcListName);
+	    
+	    //add(confPlot);
+	    //add(new DebugInfoParticleList("all particles"));
+	    //add(new EnergySumPlotter("all particles", "test.aida"));
+	    //add(new DebugCheckHitCounts("all particles", "input hit map ecal", "input hit map hcal"));
+	    // Go to work finding the correction for missing energy:
+	    MapToHitMapDriver convertECAL = new MapToHitMapDriver();
+	    MapToHitMapDriver convertHCAL = new MapToHitMapDriver();
+	    convertECAL.setInputMap("input hit map ecal");
+	    convertHCAL.setInputMap("input hit map hcal");
+	    convertECAL.setOutputHitMap("converted ecal");
+	    convertHCAL.setOutputHitMap("converted hcal");
+	    add(convertECAL);
+	    add(convertHCAL);
+	    HitMapAddDriver adder = new HitMapAddDriver();
+	    adder.addInputHitMap("converted ecal");
+	    adder.addInputHitMap("converted hcal");
+	    adder.setOutputHitMap("converted all");
+	    add(adder);
+
+	    // Make plots:
+	    
+	    //CorrectedEnergySumPlotter recoPlotter = new CorrectedEnergySumPlotter("converted all", "all particles", mcListName, "reco-corrected.aida");
+	    //CorrectedEnergySumPlotter recoPlotter = new org.lcsim.recon.pfa.output.MatCalibrationPlots("converted all", "all particles", mcListName, "reco-corrected.aida");
+	    //add(recoPlotter);
+	    //add(new EnergySumPlotter("all particles", "reco-uncorrected.aida"));
+
+            // Make plots with Ron's ClusterAnalysis routine:
+            //// all particles = "charged hadron particles 2" + "neutral hadron particles" + "photon particles"
+            //add(new ParticleListToClusterListDriver("charged hadron particles 2", "clusters of charged hadron particles 2"));
+            //add(new ParticleListToClusterListDriver("neutral hadron particles", "clusters of neutral hadron particles"));
+            //add(new ParticleListToClusterListDriver("photon particles", "clusters of photon particles"));
+            //String[] MSTClusternames1 = {"clusters of charged hadron particles 2", "clusters of neutral hadron particles", "clusters of photon particles"};
+            //String[] hitcollnames1 = {"EcalBarrDigiHits","EcalEndcapDigiHits","HcalBarrDigiHits","HcalEndcapDigiHits"};
+            //add(new ClusterAnalysisDriver(MSTClusternames1,hitcollnames1, mcListName, "MatPlots"));
+
+	}
+
+	/*
+
+	// Here's another thing to look at: Perfect PFA
+	
+	// Cluster the hits (perfect pattern recognition)
+	PerfectClusterer clusterer = new PerfectClusterer();
+	clusterer.setInputHitMap("digi hitmap");
+	clusterer.setOutputHitMap("perfect leftover hits");
+	clusterer.setOutputClusterList("perfect clusters");
+	clusterer.setMCParticleList(mcList);
+	add(clusterer);
+	// ID the clusters and create reconstructed particles
+	PerfectIdentifier id = new PerfectIdentifier();
+	id.setInputClusterList("perfect clusters");
+	id.setOutputParticleList("perfect particles");
+	id.setMCParticleList(mcList);
+	id.setInputTrackList(EventHeader.TRACKS);
+	add(id);
+	
+	*/
+	// Make sure the book-keeping is OK:
+	add(accountant);
+    }
+
+    public void process(EventHeader event) {
+        // Special handling of things that need per-event info.
+	// This probably belongs in its own driver.
+	// (m_perEventQuantities is filled by makeEventInfoList; push the
+	// current event into each quantity before the child drivers run.)
+        if (m_perEventQuantities != null) {
+            for (StructuralLikelihoodQuantityWithEventInfo quant : m_perEventQuantities) {
+                quant.setEventInfo(event);
+            }
+        }
+
+	// This is a clumsy way of doing a veto.
+	// Require >= 84 GeV in barrel:
+        //if (checkEnergyBarrel(event, 0.923)) {
+	//super.process(event);
+	//}
+
+	// Require in barrel (i.e. primary qqbar pair has
+	// |cos(theta)| < 1/sqrt(2)):
+	// Only events passing this cut are handed to the child drivers.
+	if (angleCheckLei(event, 0.707106781)) {
+	    super.process(event);
+	}
+    }
+
+    // Require that the primary qqbar pair has |cos(theta)| < threshold,
+    // where theta is the polar angle (theta=0 along the beam direction).
+    // Lei suggests a cut of |cos(theta)| < sqrt(0.5).
+    // The "primary" pair is taken to be the most energetic quark
+    // (PDG ID 1..6) and most energetic antiquark (PDG ID -6..-1) in the
+    // MC particle list.  Returns false (with a diagnostic printout) when
+    // either is missing, or when quark and antiquark disagree about the cut.
+    protected boolean angleCheckLei(EventHeader event, double threshold) 
+    {
+	MCParticle mostEnergeticQuark = null;
+	MCParticle mostEnergeticAntiQuark = null;
+	List<MCParticle> mcParticles = event.getMCParticles();
+	for (MCParticle part : mcParticles) {
+	    int pdg = part.getPDGID();
+	    if (pdg>0 && pdg<7) {
+		if (mostEnergeticQuark==null || part.getEnergy() > mostEnergeticQuark.getEnergy()) {
+		    mostEnergeticQuark = part;
+		}
+	    } else if (pdg<0 && pdg>-7) {
+		if (mostEnergeticAntiQuark==null || part.getEnergy() > mostEnergeticAntiQuark.getEnergy()) {
+		    mostEnergeticAntiQuark = part;
+		}
+	    }
+	}
+	if (mostEnergeticQuark==null || mostEnergeticAntiQuark==null) {
+	    System.out.println("No quark/antiquark!");
+	    System.out.println("mostEnergeticQuark="+mostEnergeticQuark);
+	    System.out.println("mostEnergeticAntiQuark="+mostEnergeticAntiQuark);
+	    System.out.println("failing this event...");
+	    return false;
+	} else {
+	    double cosThetaQuark = mostEnergeticQuark.getMomentum().z() / mostEnergeticQuark.getMomentum().magnitude();
+	    double cosThetaAntiQuark = mostEnergeticAntiQuark.getMomentum().z() / mostEnergeticAntiQuark.getMomentum().magnitude();
+	    boolean eventCutPassedQuark = (Math.abs(cosThetaQuark) < threshold);
+	    boolean eventCutPassedAntiQuark = (Math.abs(cosThetaAntiQuark) < threshold);
+	    if (eventCutPassedQuark != eventCutPassedAntiQuark) {
+		System.out.println("Quark-antiquark ambiguity!");
+		System.out.println("mostEnergeticQuark has pdg="+mostEnergeticQuark.getPDGID()+" and energy "+mostEnergeticQuark.getEnergy()+" and momentum=("+mostEnergeticQuark.getMomentum().x()+","+mostEnergeticQuark.getMomentum().y()+","+mostEnergeticQuark.getMomentum().z()+") and cosTheta="+cosThetaQuark);
+		// Bug fix: this printout previously appended cosThetaQuark
+		// instead of cosThetaAntiQuark, misreporting the antiquark angle.
+		System.out.println("mostEnergeticAntiQuark pdg="+mostEnergeticAntiQuark.getPDGID()+" and energy "+mostEnergeticAntiQuark.getEnergy()+" and momentum=("+mostEnergeticAntiQuark.getMomentum().x()+","+mostEnergeticAntiQuark.getMomentum().y()+","+mostEnergeticAntiQuark.getMomentum().z()+") and cosTheta="+cosThetaAntiQuark);
+		System.out.println("failing this event...");
+		return false;
+	    } else {
+		return eventCutPassedQuark;
+	    }
+	}
+    }
+
+    protected boolean checkEnergyBarrel(EventHeader event, double threshold) 
+    {
+        // Require that at least a fraction `threshold` (e.g. 90%) of the
+        // final-state MC energy lies within the barrel, defined here as
+        // |cos(theta)| < 0.8.
+        double barrelEnergy = 0.0;
+        double endcapEnergy = 0.0;
+        for (MCParticle particle : event.getMCParticles()) {
+            // Only generator final-state particles count toward the sums.
+            if (particle.getGeneratorStatus() != MCParticle.FINAL_STATE) {
+                continue;
+            }
+            Hep3Vector p = particle.getMomentum();
+            double cosTheta = p.z() / p.magnitude();
+            if (Math.abs(cosTheta) < 0.8) {
+                barrelEnergy += particle.getEnergy();   // barrel
+            } else {
+                endcapEnergy += particle.getEnergy();   // non-barrel
+            }
+        }
+        double totalEnergy = barrelEnergy + endcapEnergy;
+        return (barrelEnergy / totalEnergy > threshold);
+    }
+
+
+    protected void makeEventInfoList(LikelihoodEvaluator eval) 
+    {
+        // Scan the evaluator's likelihood quantities and remember the
+        // subset that needs per-event information, so process() can
+        // update them at the start of every event.
+        m_perEventQuantities = new Vector<StructuralLikelihoodQuantityWithEventInfo>();
+        for (StructuralLikelihoodQuantity quantity : eval.getLikelihoodQuantities()) {
+            if (quantity instanceof StructuralLikelihoodQuantityWithEventInfo) {
+                m_perEventQuantities.add((StructuralLikelihoodQuantityWithEventInfo) quantity);
+            }
+        }
+    }
+
+    // Special treatment for likelihood quantities that need per-event info
+    // (e.g. geometry).  Null until makeEventInfoList() fills it; process()
+    // then pushes the current event into each entry.
+    List<StructuralLikelihoodQuantityWithEventInfo> m_perEventQuantities = null;
+
+}

lcsim/src/org/lcsim/contrib/uiowa/template
RonSnapshotPFAWrapper.java added at 1.1
diff -N RonSnapshotPFAWrapper.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ RonSnapshotPFAWrapper.java	24 Oct 2006 17:48:33 -0000	1.1
@@ -0,0 +1,28 @@
+import org.lcsim.recon.cluster.util.ParticleListToClusterListDriver;
+import org.lcsim.util.Driver;
+import org.lcsim.recon.cluster.analysis.*;
+
+public class RonSnapshotPFAWrapper extends Driver 
+{
+    // Runs the snapshot PFA and then Ron's ClusterAnalysis plotting chain.
+    public RonSnapshotPFAWrapper() {
+
+	// Launch the PFA (boolean flag false — presumably "no output write";
+	// confirm against RonSnapshotPFA's constructor).
+	// Outputs:
+	//   * "charged hadron particles 2"
+	//   * "neutral hadron particles"
+	//   * "photon particles"
+	// and their sum is "all particles"
+	add(new RonSnapshotPFA(false));
+
+	// Turn each reconstructed-particle list into a cluster list,
+	// then hand all three to the ClusterAnalysis driver for plots.
+	add(new ParticleListToClusterListDriver("charged hadron particles 2", "clusters of charged hadron particles 2"));
+	add(new ParticleListToClusterListDriver("neutral hadron particles", "clusters of neutral hadron particles"));
+	add(new ParticleListToClusterListDriver("photon particles", "clusters of photon particles"));
+	String[] clusterListNames = {"clusters of charged hadron particles 2", "clusters of neutral hadron particles", "clusters of photon particles"};
+	String[] digiHitCollectionNames = {"EcalBarrDigiHits","EcalEndcapDigiHits","HcalBarrDigiHits","HcalEndcapDigiHits"};
+	add(new ClusterAnalysisDriver(clusterListNames, digiHitCollectionNames, "GenFinalStateParticles", "MatPlots"));
+    }
+}

lcsim/src/org/lcsim/contrib/uiowa/template
RonSnapshotPFAWrite.java added at 1.1
diff -N RonSnapshotPFAWrite.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ RonSnapshotPFAWrite.java	24 Oct 2006 17:48:33 -0000	1.1
@@ -0,0 +1,9 @@
+import org.lcsim.util.Driver;
+
+public class RonSnapshotPFAWrite extends Driver {
+    // Thin wrapper that runs the snapshot PFA with its boolean constructor
+    // flag set to true — presumably enabling output writing, per the class
+    // name; confirm against RonSnapshotPFA's constructor.
+    public RonSnapshotPFAWrite() {
+	add(new RonSnapshotPFA(true));
+    }
+}
+
CVSspam 0.2.8