Commit in lcsim/src/org/lcsim/contrib/uiowa/template on MAIN
NonTrivialPFA.java  +302 -104  1.8 -> 1.9
Current PFA snapshot

lcsim/src/org/lcsim/contrib/uiowa/template
NonTrivialPFA.java 1.8 -> 1.9
diff -u -r1.8 -r1.9
--- NonTrivialPFA.java	24 Oct 2006 17:20:51 -0000	1.8
+++ NonTrivialPFA.java	3 Nov 2006 22:20:59 -0000	1.9
@@ -12,6 +12,7 @@
 import org.lcsim.recon.cluster.mipfinder.*;
 import org.lcsim.recon.cluster.clumpfinder.*;
 import org.lcsim.recon.cluster.structural.likelihood.*;
+import org.lcsim.recon.cluster.structural.FragmentIdentifierDecisionMaker;
 import org.lcsim.digisim.DigiSimDriver;
 import org.lcsim.recon.cluster.util.CalHitMapDriver;
 import org.lcsim.digisim.SimCalorimeterHitsDriver;
@@ -25,6 +26,7 @@
 import org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher;
 import org.lcsim.recon.pfa.identifier.SimpleTrackMIPClusterMatcher;
 import org.lcsim.recon.pfa.identifier.TrackClusterMatcher;
+import org.lcsim.recon.pfa.identifier.SmallPhotonMaker;
 import org.lcsim.recon.cluster.util.BothCalorimetersDecision;
 import org.lcsim.recon.cluster.util.ClusterListFilterDriver;
 import org.lcsim.recon.cluster.util.VetoClustersFromParticles;
@@ -37,6 +39,11 @@
 import org.lcsim.recon.cluster.cheat.PerfectClusterer;
 import org.lcsim.recon.cluster.structural.ChargedNeutralFragmentSeparator;
 import org.lcsim.recon.cluster.util.ClusterFirstLayerDecision;
+import org.lcsim.recon.cluster.structural.SimpleFragmentIdentifier;
+import org.lcsim.recon.pfa.output.DebugPrintClusterInfo;
+import org.lcsim.recon.cluster.util.GenericClusterEnergyCalculator;
+import org.lcsim.recon.cluster.util.PhotonClusterEnergyCalculator;
+import org.lcsim.recon.cluster.util.HitInECALDecision;
 
 import org.lcsim.util.aida.AIDA;
 
@@ -59,6 +66,10 @@
 	accountant.setDebug(false);
 	//accountant.setDebug(true);
 
+	// Ad-hoc calibration
+	org.lcsim.contrib.uiowa.compile.AdHocEnergyCalibration adHocCalib = new org.lcsim.contrib.uiowa.compile.AdHocEnergyCalibration();
+	add(adHocCalib);
+
 	// Set up the MC list
 	//
 	// Shift this bit into the process() routine:
@@ -98,15 +109,15 @@
 	// Find tracks
 	// Non-cheating: Output: List<Track> saved as EventHeader.TRACKS
 	add (new MCFastTracking());
-	// Cheating: Output: CombinedTracks (type CheatTrack)
-	org.lcsim.recon.ztracking.cheater.TrackingCheater trackCheater = new org.lcsim.recon.ztracking.cheater.TrackingCheater();
-	trackCheater.setUseFinalStateParticles(true);
-	add(trackCheater);
+	// // Cheating: Output: CombinedTracks (type CheatTrack)
+ 	// org.lcsim.recon.ztracking.cheater.TrackingCheater trackCheater = new org.lcsim.recon.ztracking.cheater.TrackingCheater();
+	// trackCheater.setUseFinalStateParticles(true);
+	// add(trackCheater);
 
 	String nonCheatingTrackList = EventHeader.TRACKS;
 	String cheatingTrackList = new String("CombinedTracks");
-	//String trackList = nonCheatingTrackList;
-	String trackList = cheatingTrackList;
+	String trackList = nonCheatingTrackList;
+	//String trackList = cheatingTrackList;
 
 
 	// Finding photons and MIPs needs to be done carefully, since
@@ -132,6 +143,8 @@
 	    mipHadID.setOutputMIPList("mips minus front-side mips ecal");
 	    mipHadID.setOutputParticleList("front-side mip particles");
 	    add(mipHadID);
+	    //mipMatch.setDebug(true);
+	    //mipHadID.setDebug(true);
 	}
 
 	// Isolate the genuine (charged) MIPs from the rest
@@ -169,24 +182,59 @@
 	    photonFinder.setLayerProximityThreshold(2);
 	    photonFinder.setCoreSizeMinimum(10);
 	    photonFinder.setFragmentSizeMaximum(6);
+	    photonFinder.setCoreFirstLayerRange(5);
 	    photonFinder.setInputHitMap("hit map ecal without charged front-side mips");
 	    photonFinder.setOutputHitMap("hit map ecal without photons or charged front-side mips");
-	    photonFinder.setOutputClusterList("photon clusters");
-	    //photonFinder.setDebug(true);
+	    photonFinder.setOutputClusterList("photon clusters (unfiltered)");
 	    add(photonFinder);
+	    //photonFinder.setDebug(true);
+	    //add(new DebugPrintClusterInfo("photon clusters (unfiltered)"));
 	}
 
-	// Merge the charged MIP hits back in
+	// ... except some of those "photons" are the start of charged showers,
+	// so check again and exclude any which match up to a track.
 	{
+	    // Check if any "photons" have a track match:
+	    SimpleChargedParticleMaker hadID = new SimpleChargedParticleMaker();
+	    SimpleTrackClusterMatcher clusMatch = new SimpleTrackClusterMatcher();
+	    add(clusMatch);
+	    hadID.setTrackMatcher(clusMatch);
+	    hadID.setInputTrackList(trackList);
+	    hadID.setOutputTrackList("tracks left over from front-side photon-like showers");
+	    hadID.setInputClusterList("photon clusters (unfiltered)");
+	    hadID.setOutputParticleList("front-side photon-like charged particles");
+	    //hadID.setDebug(true);
+	    //clusMatch.setDebug(true);
+	    add(hadID);
+	    // If so, remove them from the photon list:
+	    ClusterListFilterDriver filterRemoveChargedClusters = new ClusterListFilterDriver();
+	    ClusterListFilterDriver filterSelectChargedClusters = new ClusterListFilterDriver();
+	    VetoClustersFromParticles vetoChargedClusters = new VetoClustersFromParticles("front-side photon-like charged particles"); // veto charged clusters
+	    DecisionMakerSingle<Cluster> selectChargedClusters = new NotDecisionMakerSingle<Cluster> (vetoChargedClusters); // invert veto to select charged clusters
+	    filterRemoveChargedClusters.setInputDecision(vetoChargedClusters);
+	    filterSelectChargedClusters.setInputDecision(selectChargedClusters);
+	    filterRemoveChargedClusters.setInputList("photon clusters (unfiltered)");
+	    filterSelectChargedClusters.setInputList("photon clusters (unfiltered)");
+	    filterRemoveChargedClusters.setOutputList("photon clusters");
+	    filterSelectChargedClusters.setOutputList("charged front-side photon-like clusters");
+	    add(vetoChargedClusters);
+	    add(filterRemoveChargedClusters);
+	    add(filterSelectChargedClusters);
+	    // Add the charged "photons" back to the event, along with the charged MIPs:
+	    ClusterListToHitMapDriver convertChargedClustersToHitMap = new ClusterListToHitMapDriver("charged front-side photon-like clusters", "hit map ecal of charged photon-like clusters");
 	    ClusterListToHitMapDriver convertRealMIPsToHitMap = new ClusterListToHitMapDriver("charged front-side mips ecal", "hit map ecal of charged front-side mips");
+	    add(convertChargedClustersToHitMap);
 	    add(convertRealMIPsToHitMap);
-	    HitMapAddDriver addRealMIPHitsBack = new HitMapAddDriver();
-	    addRealMIPHitsBack.addInputHitMap("hit map ecal of charged front-side mips");
-	    addRealMIPHitsBack.addInputHitMap("hit map ecal without photons or charged front-side mips");
-	    addRealMIPHitsBack.setOutputHitMap("hit map ecal without photons");
-	    add(addRealMIPHitsBack);
+	    HitMapAddDriver addChargedClusterHitsBack = new HitMapAddDriver();
+	    addChargedClusterHitsBack.addInputHitMap("hit map ecal of charged photon-like clusters");
+	    addChargedClusterHitsBack.addInputHitMap("hit map ecal of charged front-side mips");
+	    addChargedClusterHitsBack.addInputHitMap("hit map ecal without photons or charged front-side mips");
+	    addChargedClusterHitsBack.setOutputHitMap("hit map ecal without photons");
+	    add(addChargedClusterHitsBack);
+	    //add(new DebugPrintClusterInfo("photon clusters"));
+	    //add(new DebugPrintClusterInfo("charged front-side photon-like clusters"));
 	}
-	
+
 	// Find track segments in ECAL and HCAL
 	{
 	    TrackClusterDriver ecalMIP = new TrackClusterDriver("hit map ecal without photons", "mips ecal", "hit map ecal without mips or photons");
@@ -219,10 +267,13 @@
 	// ID photons
 	{
 	    SimpleNeutralParticleMaker myPhotonIdentifier = new SimpleNeutralParticleMaker(22); // 22 = photon
+	    myPhotonIdentifier.setCalibration(adHocCalib);
+	    //myPhotonIdentifier.setCalibration(new GenericClusterEnergyCalculator());
+	    //myPhotonIdentifier.setCalibration(new PhotonClusterEnergyCalculator());
 	    myPhotonIdentifier.setInputClusterList("photon clusters");
-	    myPhotonIdentifier.setOutputParticleList("photon particles");
+	    myPhotonIdentifier.setOutputParticleList("large photon particles");
 	    add(myPhotonIdentifier);
-	    accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips ecal", "mips hcal", "photon particles" } );
+	    accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons", "hit map hcal without mips", "mips ecal", "mips hcal", "large photon particles" } );
 	}
 
 	// Find clumps in ECAL and HCAL
@@ -234,13 +285,13 @@
 	ClumpFinder findClumpsHCAL = new ClumpFinder("hit map hcal without mips", "clumps hcal", "hit map hcal without mips or clumps");
         add(findClumpsECAL);
         add(findClumpsHCAL);
-	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "photon particles", "clumps ecal", "clumps hcal" } );
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "large photon particles", "clumps ecal", "clumps hcal" } );
 	ListAddDriver<Cluster> mergeClumps = new ListAddDriver<Cluster>();
 	mergeClumps.addInputList("clumps ecal");
         mergeClumps.addInputList("clumps hcal");
         mergeClumps.setOutputList("clumps");
 	add(mergeClumps);
-	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "photon particles", "clumps" } );
+	accountant.addListOfNamedLists( new String[] { "hit map ecal without mips or photons or clumps", "hit map hcal without mips or clumps", "mips ecal", "mips hcal", "large photon particles", "clumps" } );
 
 	// Step 3: Find large-scale hadronic clusters with the MST
 	// Output: List<Cluster>
@@ -260,7 +311,7 @@
         mstHcal.registerMetrics(new MinimumHitToHitDistance());
 	add (mstEcal);
 	add (mstHcal);
-	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters ecal", "mst clusters hcal", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters ecal", "mst clusters hcal", "large photon particles" } );
 
 	// Step 4: Link across the ECAL-HCAL boundary
 	// Input: List<Cluster> for ECAL
@@ -274,7 +325,7 @@
         mstDriverLink.setClusterName("mst clusters linked");
 	mstDriverLink.setPairDecision(new BothCalorimetersDecision());
         add(mstDriverLink);
-	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked", "large photon particles" } );
 
 	// OK. Now we are going to weed out the small (<10 hit) clusters.
 	ClusterListFilterDriver sizeFilterDriver = new ClusterListFilterDriver();
@@ -285,10 +336,10 @@
 	sizeFilterDriver.setOutputHitMapPass("hits from mst clusters linked (>=10 hits)");
 	sizeFilterDriver.setOutputHitMapFail("hits from mst clusters linked (<10 hits)");
 	add(sizeFilterDriver);
-	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "photon particles" } );
-	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "photon particles" } );
-	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "photon particles" } );
-	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "large photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "large photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "large photon particles" } );
+	accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "large photon particles" } );
 
 	if (writeLikelihood) {
 	    //   Step 6a: Make likelihood PDFs
@@ -308,7 +359,7 @@
 	    likelihoodWriter.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
 	    add(likelihoodWriter);
 	    LikelihoodEvaluatorCheckpointDriver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
-	    checkpoint.setVerbose(true);
+	    checkpoint.setDebug(true);
 	    add(checkpoint);
 	} else {
 
@@ -352,7 +403,7 @@
 	    // finish setting up linker
 	    likelihoodLinker.setIgnoreClusterDecision(new ClusterSizeDecision(10));
 	    add(likelihoodLinker);
-	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons", "structural unused hits", "mst clusters linked (<10 hits)", "photon particles" } );
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons", "structural unused hits", "mst clusters linked (<10 hits)", "large photon particles" } );
 
 	    // Step 6b: Halo
 	    // Inputs from event:
@@ -362,7 +413,7 @@
 	    //   * Fleshed-out skeletons (with halo added)
 	    //   * Modified hitmap with any remaining clustered hits
 	    add(new org.lcsim.recon.cluster.structural.HaloAssigner("skeletons", "structural unused hits", "skeletons plus halo", "structural unused hits minus halo"));
-	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons plus halo", "structural unused hits minus halo", "mst clusters linked (<10 hits)", "photon particles" } );
+	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons plus halo", "structural unused hits minus halo", "mst clusters linked (<10 hits)", "large photon particles" } );
 
 	    // Step 6c: Extrapolate tracks to ECAL surface, form charged particles
 
@@ -380,6 +431,7 @@
 		hadIDmip.addClusterList("skeletons plus halo", "skeletons plus halo minus charged particles from mips");
 		hadIDmip.addClusterList("mst clusters linked (<10 hits)", "mst clusters linked (<10 hits) minus charged particles from mips");
 		add(hadIDmip);
+		//hadIDmip.setDebug(true);
 	    }
 
 	    // Then try the clusters generically:
@@ -393,6 +445,7 @@
 		hadID.setInputClusterList("skeletons plus halo minus charged particles from mips");
 		hadID.setOutputParticleList("charged hadron particles with non-mip association");
 		add(hadID);
+		//hadID.setDebug(true);
 	    }
 
 	    // Merge the two particle lists:
@@ -405,49 +458,94 @@
 	    }
 
 	    // Step 6d: Handle fragments
-
-	    org.lcsim.recon.cluster.structural.FragmentHandler fragMergeDriver = new org.lcsim.recon.cluster.structural.FragmentHandler();
-	    fragMergeDriver.addInputClusterList("skeletons plus halo");
-	    fragMergeDriver.addInputClusterList("mst clusters linked (<10 hits)");
-	    fragMergeDriver.setOutputClusterList("clusters with fragments merged");
-	    fragMergeDriver.setOutputHitMap("hits left over after fragment merge");
-	    org.lcsim.recon.cluster.structural.SimpleFragmentIdentifier fragID = new org.lcsim.recon.cluster.structural.SimpleFragmentIdentifier(10, 100.0);
+	    SimpleFragmentIdentifier fragID = new SimpleFragmentIdentifier(10, 100.0);
 	    fragID.addParticleList("charged hadron particles");
-	    fragMergeDriver.setFragmentIdentifier(fragID); // don't cheat
-	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
 
-	    boolean cheatOnFragmentMerge = false;
-	    if (cheatOnFragmentMerge) {
-		// Here are our cluster lists:
-		//   "skeletons plus halo plus orphan mips"
-		//   "mst clusters linked (<10 hits) minus orphan mips"
-		// Here is the MC list:
-		//   "GenFinalStateParticles" [or sim]
-		// Here are the output lists:
-		//   "CheatFragmentIdentifier info P->C"
-		//   "CheatFragmentIdentifier info C->P"
-		ClusterListToHitMapDriver clusterConverter = new ClusterListToHitMapDriver();
-		clusterConverter.addInputList("skeletons plus halo");
-		clusterConverter.addInputList("mst clusters linked (<10 hits)");
-		clusterConverter.setOutputHitMap("HitMap for CheatFragmentIdentifier");
-		add(clusterConverter);
-		add(new HitMapToHitListDriver("HitMap for CheatFragmentIdentifier", "HitList for CheatFragmentIdentifier"));
-		add(new HitMapToHitListDriver("structural unused hits minus halo", "HitList2 for CheatFragmentIdentifier"));
-		String[] hitListNames = { "HitList for CheatFragmentIdentifier", "HitList2 for CheatFragmentIdentifier" } ; // Do we need HitList2?
-		String[] clusterListNames = { "skeletons plus halo", "mst clusters linked (<10 hits)" } ;
-		org.lcsim.recon.cluster.structural.CheatFragmentIdentifier cheatFragID = new org.lcsim.recon.cluster.structural.CheatFragmentIdentifier(hitListNames, clusterListNames, mcListName, "CheatFragmentIdentifier info P->C", "CheatFragmentIdentifier info C->P");
-		fragMergeDriver.setFragmentIdentifier(cheatFragID); // cheat
-		org.lcsim.recon.cluster.structural.CheatFragmentMerger cheatFragMerge = new org.lcsim.recon.cluster.structural.CheatFragmentMerger();
-		cheatFragMerge.initializeAssociator("TmpHitList", "TmpClusterList", mcListName, "TmpMCCList", "TmpCMCList");
-		fragMergeDriver.setFragmentMerger(cheatFragMerge); // cheat [broken?]
-		add(cheatFragMerge);
-	    } else {
+	    {
+		// First, "fragments" in the first few ECAL layers are probably photons.
+		FragmentIdentifierDecisionMaker isFragmentDecisionDriver = new FragmentIdentifierDecisionMaker(fragID);
+		AndDecisionMakerSingle<Cluster> isSmallPhoton = new AndDecisionMakerSingle<Cluster>();
+		isSmallPhoton.addDecisionMaker(isFragmentDecisionDriver);
+		isSmallPhoton.addDecisionMaker(new ClusterFirstLayerDecision(4, new HitInECALDecision()));
+		ClusterListFilterDriver isSmallPhotonFilter1 = new ClusterListFilterDriver();
+		ClusterListFilterDriver isSmallPhotonFilter2 = new ClusterListFilterDriver();
+		isSmallPhotonFilter1.setInputDecision(isSmallPhoton);
+		isSmallPhotonFilter2.setInputDecision(isSmallPhoton);
+		isSmallPhotonFilter1.setInputList("skeletons plus halo");
+		isSmallPhotonFilter2.setInputList("mst clusters linked (<10 hits)");
+		isSmallPhotonFilter1.setOutputList("small photon clusters 1");
+		isSmallPhotonFilter2.setOutputList("small photon clusters 2");
+		isSmallPhotonFilter1.setOutputClusterListFail("skeletons plus halo minus small photons");
+		isSmallPhotonFilter2.setOutputClusterListFail("mst clusters linked (<10 hits) minus small photons");
+		ListAddDriver<Cluster> mergeSmallPhotonClusters = new ListAddDriver<Cluster>();
+		mergeSmallPhotonClusters.addInputList("small photon clusters 1");
+		mergeSmallPhotonClusters.addInputList("small photon clusters 2");
+		mergeSmallPhotonClusters.setOutputList("small photon clusters");
+		SimpleNeutralParticleMaker smallPhotonMaker = new SimpleNeutralParticleMaker(22);
+		smallPhotonMaker.setCalibration(adHocCalib);
+		smallPhotonMaker.setInputClusterList("small photon clusters");
+		smallPhotonMaker.setOutputParticleList("small photons"); // must match the "small photons" name used by the accountant, the particle mergers, and ParticleListToClusterListDriver below
+		//smallPhotonMaker.setCalibration(new GenericClusterEnergyCalculator());
+		//smallPhotonMaker.setCalibration(new PhotonClusterEnergyCalculator());
+
+		add(isFragmentDecisionDriver);
+		add(isSmallPhotonFilter1);
+		add(isSmallPhotonFilter2);
+		add(mergeSmallPhotonClusters);
+		add(smallPhotonMaker);
+	    }
+
+	    {
+		org.lcsim.recon.cluster.structural.FragmentHandler fragMergeDriver = new org.lcsim.recon.cluster.structural.FragmentHandler();
+		org.lcsim.recon.cluster.structural.FragmentHandler fragNoMergeDriver = new org.lcsim.recon.cluster.structural.FragmentHandler();
+		fragMergeDriver  .addInputClusterList("skeletons plus halo minus small photons");
+		fragNoMergeDriver.addInputClusterList("skeletons plus halo minus small photons");
+		fragMergeDriver  .addInputClusterList("mst clusters linked (<10 hits) minus small photons");
+		fragNoMergeDriver.addInputClusterList("mst clusters linked (<10 hits) minus small photons");
+		fragMergeDriver  .setOutputClusterList("clusters with fragments merged");
+		fragNoMergeDriver.setOutputClusterList("clusters with fragments unmerged");
+		fragMergeDriver  .setOutputHitMap("hits left over after fragment merge");
+		fragNoMergeDriver.setOutputHitMap("hits left over after fragment unmerge");
+		fragMergeDriver  .setFragmentIdentifier(fragID); // don't cheat
+		fragNoMergeDriver.setFragmentIdentifier(fragID); // don't cheat
+		
 		org.lcsim.recon.cluster.structural.SimpleFragmentMerger fragMerge = new org.lcsim.recon.cluster.structural.SimpleFragmentMerger();
-		fragMergeDriver.setFragmentMerger(fragMerge); // don't cheat
+		org.lcsim.recon.cluster.structural.DropFragments fragNoMerge = new org.lcsim.recon.cluster.structural.DropFragments();
+		fragMergeDriver.setFragmentMerger(fragMerge); // merge, don't cheat
+		fragNoMergeDriver.setFragmentMerger(fragNoMerge); // don't merge, don't cheat
 		add(fragMergeDriver);
+		add(fragNoMergeDriver);
+		
+		accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "large photon particles", "small photons" } );
+		accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments unmerged", "hits left over after fragment unmerge", "large photon particles", "small photons" } );
 	    }
-
-	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
+	    
+// 	    boolean cheatOnFragmentMerge = false;
+// 	    if (cheatOnFragmentMerge) {
+// 		// Here are our cluster lists:
+// 		//   "skeletons plus halo plus orphan mips"
+// 		//   "mst clusters linked (<10 hits) minus orphan mips"
+// 		// Here is the MC list:
+// 		//   "GenFinalStateParticles" [or sim]
+// 		// Here are the output lists:
+// 		//   "CheatFragmentIdentifier info P->C"
+// 		//   "CheatFragmentIdentifier info C->P"
+// 		ClusterListToHitMapDriver clusterConverter = new ClusterListToHitMapDriver();
+// 		clusterConverter.addInputList("skeletons plus halo");
+// 		clusterConverter.addInputList("mst clusters linked (<10 hits)");
+// 		clusterConverter.setOutputHitMap("HitMap for CheatFragmentIdentifier");
+// 		add(clusterConverter);
+// 		add(new HitMapToHitListDriver("HitMap for CheatFragmentIdentifier", "HitList for CheatFragmentIdentifier"));
+// 		add(new HitMapToHitListDriver("structural unused hits minus halo", "HitList2 for CheatFragmentIdentifier"));
+// 		String[] hitListNames = { "HitList for CheatFragmentIdentifier", "HitList2 for CheatFragmentIdentifier" } ; // Do we need HitList2?
+// 		String[] clusterListNames = { "skeletons plus halo", "mst clusters linked (<10 hits)" } ;
+// 		org.lcsim.recon.cluster.structural.CheatFragmentIdentifier cheatFragID = new org.lcsim.recon.cluster.structural.CheatFragmentIdentifier(hitListNames, clusterListNames, mcListName, "CheatFragmentIdentifier info P->C", "CheatFragmentIdentifier info C->P");
+// 		fragMergeDriver.setFragmentIdentifier(cheatFragID); // cheat
+// 		org.lcsim.recon.cluster.structural.CheatFragmentMerger cheatFragMerge = new org.lcsim.recon.cluster.structural.CheatFragmentMerger();
+// 		cheatFragMerge.initializeAssociator("TmpHitList", "TmpClusterList", mcListName, "TmpMCCList", "TmpCMCList");
+// 		fragMergeDriver.setFragmentMerger(cheatFragMerge); // cheat [broken?]
+// 		add(cheatFragMerge);
+// 	    }
 
 	    // Repeat the hadron ID step with the revised cluster list:
 
@@ -464,6 +562,27 @@
 		hadIDmip2.setOutputMIPList("unmatched mips 2");
 		hadIDmip2.addClusterList("clusters with fragments merged", "clusters with fragments merged minus charged particles from mips");
 		add(hadIDmip2);
+		//mipMatch.setSemiDebug(true);
+		hadIDmip2.setCheckEoverP(true);
+		hadIDmip2.setCalibration(adHocCalib);
+
+	    }
+	    {
+		MIPChargedParticleMaker hadIDmip2 = new MIPChargedParticleMaker();
+		SimpleTrackMIPClusterMatcher mipMatch = new SimpleTrackMIPClusterMatcher();
+		add(mipMatch);
+		hadIDmip2.setTrackMatcher(mipMatch);
+		hadIDmip2.setInputTrackList(trackList);
+		hadIDmip2.setOutputTrackList("tracks minus mip associations 2");
+		hadIDmip2.setInputMIPList("mips");
+		hadIDmip2.setOutputParticleList("charged hadron particles with mip association 2 [no frag merge]");
+		hadIDmip2.setOutputMIPList("unmatched mips 2 [no frag merge]");
+		hadIDmip2.addClusterList("clusters with fragments unmerged", "clusters with fragments unmerged minus charged particles from mips");
+		add(hadIDmip2);
+		//mipMatch.setSemiDebug(true);
+		hadIDmip2.setCheckEoverP(true);
+		hadIDmip2.setCalibration(adHocCalib);
+
 	    }
 
 	    // Then try the clusters generically:
@@ -476,7 +595,28 @@
 		hadID2.setOutputTrackList("leftover tracks 2");
 		hadID2.setInputClusterList("clusters with fragments merged minus charged particles from mips");
 		hadID2.setOutputParticleList("charged hadron particles with non-mip association 2");
+		clusMatch.setCheckEoverP(true);
+		clusMatch.setCalibration(adHocCalib);
 		add(hadID2);
+		//clusMatch.setSemiDebug(true);
+		//clusMatch.setDebug(true);
+		//hadID2.setDebug(true);
+	    }
+	    {
+		SimpleChargedParticleMaker hadID2 = new SimpleChargedParticleMaker();
+		SimpleTrackClusterMatcher clusMatch = new SimpleTrackClusterMatcher();
+		add(clusMatch);
+		hadID2.setTrackMatcher(clusMatch);
+		hadID2.setInputTrackList("tracks minus mip associations 2");
+		hadID2.setOutputTrackList("leftover tracks 2");
+		hadID2.setInputClusterList("clusters with fragments unmerged minus charged particles from mips");
+		hadID2.setOutputParticleList("charged hadron particles with non-mip association 2 [no frag merge]");
+		clusMatch.setCheckEoverP(true);
+		clusMatch.setCalibration(adHocCalib);
+		add(hadID2);
+		//clusMatch.setSemiDebug(true);
+		//clusMatch.setDebug(true);
+		//hadID2.setDebug(true);
 	    }
 
 	    // Merge the two particle lists:
@@ -487,36 +627,84 @@
 		mergeParticles.setOutputList("charged hadron particles 2");
 		add(mergeParticles);
 	    }
+	    {
+		ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+		mergeParticles.addInputList("charged hadron particles with mip association 2 [no frag merge]");
+		mergeParticles.addInputList("charged hadron particles with non-mip association 2 [no frag merge]");
+		mergeParticles.setOutputList("charged hadron particles 2 [no frag merge]");
+		add(mergeParticles);
+	    }
 
 	    // ... and then any remaining clusters should be neutral
-	    ClusterListFilterDriver removeChargedClusters = new ClusterListFilterDriver();
-	    VetoClustersFromParticles vetoCharged = new VetoClustersFromParticles("charged hadron particles 2");
-	    removeChargedClusters.setInputDecision(vetoCharged);
-	    removeChargedClusters.setInputList("clusters with fragments merged");
-	    removeChargedClusters.setOutputList("neutral clusters with fragments merged");
-	    add(vetoCharged);
-	    add(removeChargedClusters);
-	    //SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(310); // everything is a Ks0...
-	    SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(22); // everything is a photon
-	    hadID3.setInputClusterList("neutral clusters with fragments merged");
-	    hadID3.setOutputParticleList("neutral hadron particles");
-	    add(hadID3);
-	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "charged hadron particles 2", "neutral hadron particles", "hits left over after fragment merge", "photon particles" } );
+	    {
+		ClusterListFilterDriver removeChargedClusters = new ClusterListFilterDriver();
+		VetoClustersFromParticles vetoCharged = new VetoClustersFromParticles("charged hadron particles 2");
+		removeChargedClusters.setInputDecision(vetoCharged);
+		removeChargedClusters.setInputList("clusters with fragments merged");
+		removeChargedClusters.setOutputList("neutral clusters with fragments merged");
+		add(vetoCharged);
+		add(removeChargedClusters);
+		//SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(310); // everything is a Ks0...
+		SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(22); // everything is a photon
+		hadID3.setCalibration(adHocCalib);
+		//hadID3.setCalibration(new GenericClusterEnergyCalculator());
+		hadID3.setInputClusterList("neutral clusters with fragments merged");
+		hadID3.setOutputParticleList("neutral hadron particles");
+		add(hadID3);
+		accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "charged hadron particles 2", "neutral hadron particles", "hits left over after fragment merge", "large photon particles", "small photons" } );
+	    }
+	    {
+		ClusterListFilterDriver removeChargedClusters = new ClusterListFilterDriver();
+		VetoClustersFromParticles vetoCharged = new VetoClustersFromParticles("charged hadron particles 2 [no frag merge]");
+		removeChargedClusters.setInputDecision(vetoCharged);
+		removeChargedClusters.setInputList("clusters with fragments unmerged");
+		removeChargedClusters.setOutputList("neutral clusters with fragments unmerged");
+		add(vetoCharged);
+		add(removeChargedClusters);
+		//SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(310); // everything is a Ks0...
+		SimpleNeutralParticleMaker hadID3 = new SimpleNeutralParticleMaker(22); // everything is a photon
+		hadID3.setCalibration(adHocCalib);
+		//hadID3.setCalibration(new GenericClusterEnergyCalculator());
+		hadID3.setInputClusterList("neutral clusters with fragments unmerged");
+		hadID3.setOutputParticleList("neutral hadron particles [no frag merge]");
+		add(hadID3);
+		accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "charged hadron particles 2 [no frag merge]", "neutral hadron particles [no frag merge]", "hits left over after fragment unmerge", "large photon particles", "small photons" } );
+	    }
+
 	    // Merge together all the particle lists:
-	    ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
-	    mergeParticles.addInputList("charged hadron particles 2");
-	    mergeParticles.addInputList("neutral hadron particles");
-	    mergeParticles.addInputList("photon particles");
-	    mergeParticles.setOutputList("all particles");
-	    add(mergeParticles);
-	    accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits left over after fragment merge", "all particles" } );
+	    {
+		ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+		mergeParticles.addInputList("charged hadron particles 2");
+		mergeParticles.addInputList("neutral hadron particles");
+		mergeParticles.addInputList("large photon particles");
+		mergeParticles.addInputList("small photons");
+		mergeParticles.setOutputList("all particles");
+		add(mergeParticles);
+		accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits left over after fragment merge", "all particles" } );
+	    }
+	    {
+		ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+		mergeParticles.addInputList("charged hadron particles 2 [no frag merge]");
+		mergeParticles.addInputList("neutral hadron particles [no frag merge]");
+		mergeParticles.addInputList("large photon particles");
+		mergeParticles.addInputList("small photons");
+		mergeParticles.setOutputList("all particles [no frag merge]");
+		add(mergeParticles);
+		accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits left over after fragment unmerge", "all particles [no frag merge]" } );
+	    }
+
 	    // Step 6e: Plots
-	    ConfusionPlotter confPlot = new ConfusionPlotter("all particles", mcListName);
-	    
-	    add(confPlot);
-	    //add(new DebugInfoParticleList("all particles"));
+	    {
+		ConfusionPlotter confPlot = new ConfusionPlotter("all particles", mcListName, "confusion.aida");
+		ConfusionPlotter confPlotNoMerge = new ConfusionPlotter("all particles [no frag merge]", mcListName, "confusion-nomerge.aida");
+		confPlot.setDebug(false);
+		confPlotNoMerge.setDebug(false);
+		add(confPlot);
+		add(confPlotNoMerge);
+	    }
+
 	    add(new EnergySumPlotter("all particles", "test.aida"));
-	    //add(new DebugCheckHitCounts("all particles", "input hit map ecal", "input hit map hcal"));
+
 	    // Go to work finding the correction for missing energy:
 	    MapToHitMapDriver convertECAL = new MapToHitMapDriver();
 	    MapToHitMapDriver convertHCAL = new MapToHitMapDriver();
@@ -533,18 +721,22 @@
 	    add(adder);
 
 	    // Make plots:
-	    
-	    CorrectedEnergySumPlotter recoPlotter = new CorrectedEnergySumPlotter("converted all", "all particles", mcListName, "reco-corrected.aida");
-	    //CorrectedEnergySumPlotter recoPlotter = new org.lcsim.recon.pfa.output.MatCalibrationPlots("converted all", "all particles", mcListName, "reco-corrected.aida");
-	    add(recoPlotter);
-	    add(new EnergySumPlotter("all particles", "reco-uncorrected.aida"));
+	    {	    
+		CorrectedEnergySumPlotter recoPlotter = new CorrectedEnergySumPlotter("converted all", "all particles", mcListName, "reco-corrected.aida");
+		CorrectedEnergySumPlotter recoPlotterNoMerge = new CorrectedEnergySumPlotter("converted all", "all particles [no frag merge]", mcListName, "reco-corrected-nomerge.aida");
+		add(recoPlotter);
+		add(recoPlotterNoMerge);
+		add(new EnergySumPlotter("all particles", "reco-uncorrected.aida"));
+		add(new EnergySumPlotter("all particles [no frag merge]", "reco-uncorrected-nomerge.aida"));
+	    }
 
             // Make plots with Ron's ClusterAnalysis routine:
-            // all particles = "charged hadron particles 2" + "neutral hadron particles" + "photon particles"
+            // all particles = "charged hadron particles 2" + "neutral hadron particles" + "large photon particles" + "small photons"
             add(new ParticleListToClusterListDriver("charged hadron particles 2", "clusters of charged hadron particles 2"));
             add(new ParticleListToClusterListDriver("neutral hadron particles", "clusters of neutral hadron particles"));
-            add(new ParticleListToClusterListDriver("photon particles", "clusters of photon particles"));
-            String[] MSTClusternames1 = {"clusters of charged hadron particles 2", "clusters of neutral hadron particles", "clusters of photon particles"};
+            add(new ParticleListToClusterListDriver("large photon particles", "clusters of large photon particles"));
+            add(new ParticleListToClusterListDriver("small photons", "clusters of small photons"));
+            String[] MSTClusternames1 = {"clusters of charged hadron particles 2", "clusters of neutral hadron particles", "clusters of large photon particles", "clusters of small photons"};
             String[] hitcollnames1 = {"EcalBarrDigiHits","EcalEndcapDigiHits","HcalBarrDigiHits","HcalEndcapDigiHits"};
             add(new ClusterAnalysisDriver(MSTClusternames1,hitcollnames1, mcListName, "MatPlots"));
 
@@ -638,11 +830,17 @@
 	    boolean eventCutPassedQuark = (Math.abs(cosThetaQuark) < threshold);
 	    boolean eventCutPassedAntiQuark = (Math.abs(cosThetaAntiQuark) < threshold);
 	    if (eventCutPassedQuark != eventCutPassedAntiQuark) {
-		System.out.println("Quark-antiquark ambiguity!");
-		System.out.println("mostEnergeticQuark has pdg="+mostEnergeticQuark.getPDGID()+" and energy "+mostEnergeticQuark.getEnergy()+" and momentum=("+mostEnergeticQuark.getMomentum().x()+","+mostEnergeticQuark.getMomentum().y()+","+mostEnergeticQuark.getMomentum().z()+") and cosTheta="+cosThetaQuark);
-		System.out.println("mostEnergeticAntiQuark pdg="+mostEnergeticAntiQuark.getPDGID()+" and energy "+mostEnergeticAntiQuark.getEnergy()+" and momentum=("+mostEnergeticAntiQuark.getMomentum().x()+","+mostEnergeticAntiQuark.getMomentum().y()+","+mostEnergeticAntiQuark.getMomentum().z()+") and cosTheta="+cosThetaQuark);
-		System.out.println("failing this event...");
-		return false;
+		// One passed, one failed. Check in case it's borderline -- could be numerical precision or similar
+		if (Math.abs(cosThetaQuark) < threshold*1.01 && Math.abs(cosThetaAntiQuark) < threshold*1.01) {
+		    // OK, close enough
+		    return true;
+		} else {
+		    System.out.println("Quark-antiquark ambiguity!");
+		    System.out.println("mostEnergeticQuark has pdg="+mostEnergeticQuark.getPDGID()+" and energy "+mostEnergeticQuark.getEnergy()+" and momentum=("+mostEnergeticQuark.getMomentum().x()+","+mostEnergeticQuark.getMomentum().y()+","+mostEnergeticQuark.getMomentum().z()+") and cosTheta="+cosThetaQuark);
+		    System.out.println("mostEnergeticAntiQuark pdg="+mostEnergeticAntiQuark.getPDGID()+" and energy "+mostEnergeticAntiQuark.getEnergy()+" and momentum=("+mostEnergeticAntiQuark.getMomentum().x()+","+mostEnergeticAntiQuark.getMomentum().y()+","+mostEnergeticAntiQuark.getMomentum().z()+") and cosTheta="+cosThetaAntiQuark);
+		    System.out.println("failing this event...");
+		    return false;
+		}
 	    } else {
 		return eventCutPassedQuark;
 	    }
CVSspam 0.2.8