Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
NonTrivialPFA.java  (+94 lines, -125 lines)  revision 1.8 -> 1.9
MJC: Several PFA fixes and updates; also cleanup of code.

lcsim/src/org/lcsim/contrib/uiowa
NonTrivialPFA.java 1.8 -> 1.9
diff -u -r1.8 -r1.9
--- NonTrivialPFA.java	26 Apr 2007 17:24:48 -0000	1.8
+++ NonTrivialPFA.java	16 May 2007 19:38:37 -0000	1.9
@@ -23,8 +23,6 @@
 import org.lcsim.recon.pfa.identifier.SimpleNeutralParticleMaker;
 import org.lcsim.recon.pfa.identifier.SimpleChargedParticleMaker;
 import org.lcsim.recon.pfa.identifier.MIPChargedParticleMaker;
-import org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher;
-import org.lcsim.recon.pfa.identifier.SimpleTrackMIPClusterMatcher;
 import org.lcsim.recon.pfa.identifier.TrackClusterMatcher;
 import org.lcsim.recon.pfa.identifier.SmallPhotonMaker;
 import org.lcsim.recon.cluster.util.BothCalorimetersDecision;
@@ -90,12 +88,19 @@
 	PhotonClusterEnergyCalculator ronPhotonCalib = new PhotonClusterEnergyCalculator();
 	GenericClusterEnergyCalculator ronGenericCalib = new GenericClusterEnergyCalculator();
 	DetailedNeutralHadronClusterEnergyCalculator ronNeutralHadronCalib = new DetailedNeutralHadronClusterEnergyCalculator();
+	ModifiedDetailedNeutralHadronClusterEnergyCalculator ronChargedHadronCalib = new ModifiedDetailedNeutralHadronClusterEnergyCalculator();
+	ronChargedHadronCalib.setMinimumEnergy(0.0);
+	ronChargedHadronCalib.setDoInversion(false);
 
 	// Set up the MC lists
 	// -------------------
 
 	CreateFinalStateMCParticleList mcListMakerGen = new CreateFinalStateMCParticleList("Gen");
 	CreateFinalStateMCParticleList mcListMakerSim = new CreateFinalStateMCParticleList("Sim");
+	double rcut = 300.;  // using 300 mm, although Bruce suggested 400 mm at the March 13 meeting
+	double zcut = 300.;
+	mcListMakerSim.setRadiusCut(rcut);
+	mcListMakerSim.setZCut(zcut);
 	add(mcListMakerGen);
 	add(mcListMakerSim);
 	String mcListNameGen = "GenFinalStateParticles";
@@ -144,7 +149,7 @@
 	    
 	    // Find tracks (cheating)
             // Code taken from Steve Magill
-            String cheatingTrackList = "PerfectTracks";
+            String cheatingTrackList = "Tracks (ron)";
             {
                 // Make cheat tracks and cheat clusters
                 String Tname = "RefinedCheatTracks";
@@ -162,23 +167,24 @@
                 
                 // Make perfect particles from cheat reconstructed particles
                 String outName = "PerfectRecoParticles";
-                int minT = 0;
-                int minC = 0;
-                org.lcsim.contrib.Cassell.recon.Cheat.PPRParticleDriver d = new org.lcsim.contrib.Cassell.recon.Cheat.PPRParticleDriver(CRPname, outName);
+		String outNameMC = "PerfectRecoMCParticles";
+                int minT = 4;
+                int minC = 1;
+                org.lcsim.contrib.Cassell.recon.Cheat.PPRParticleDriver d = new org.lcsim.contrib.Cassell.recon.Cheat.PPRParticleDriver(CRPname, outName, outNameMC);
                 d.setMinTrackerHits(minT);
                 d.setMinCalorimeterHits(minC);
                 add(d);
-		
-                // Make perfect tracks from perfect particles
-                org.lcsim.contrib.SteveMagill.PerfectTrackDriver perftrk = new org.lcsim.contrib.SteveMagill.PerfectTrackDriver();
-                perftrk.setParticleNames(outName);
-                perftrk.setTrackNames(cheatingTrackList);
-                add(perftrk);
+
+		// Make up list of tracks
+		org.lcsim.mc.fast.tracking.MCFastTracking ronFastMC = new org.lcsim.mc.fast.tracking.MCFastTracking();
+		ronFastMC.setOutputList(cheatingTrackList);
+		ronFastMC.setFSList("PerfectRecoMCParticles");
+		add(ronFastMC);   
             }
 	    
 	    // Choose which track list to use
-	    //trackList = cheatingTrackList;
-	    trackList = nonCheatingTrackList;
+	    trackList = cheatingTrackList;
+	    //trackList = nonCheatingTrackList;
 	}
 
 	// Find simple clusters
@@ -192,7 +198,12 @@
 	    String inputTrackList = trackList;
 	    String photonClusterList = "photon clusters";
 	    String outputHitMap = "hit map ecal without photons";
-	    addPhotonFinder(prefix, inputHitMap, inputTrackList, photonClusterList, outputHitMap);
+	    boolean cheatOnPhotons = false;
+	    if (cheatOnPhotons) {
+		addCheatingPhotonFinder(prefix, inputHitMap, inputTrackList, photonClusterList, outputHitMap, mcListName);
+	    } else {
+		addPhotonFinder(prefix, inputHitMap, inputTrackList, photonClusterList, outputHitMap);
+	    }
 	    prefix = "photonmaker: ";
 	    addPhotonParticleMaker(prefix, photonClusterList, largePhotonParticleList, ronPhotonCalib);
 	    // Book-keeping
@@ -293,15 +304,28 @@
 	    String prefix = "linker: ";
 	    String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
 	    String[] inputClusterListsForAssociator = {"mips", "clumps"};
-	    //addStructuralLinker(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1);
-	    //addCheatingStructuralLinker(prefix, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1, inputHitListsForAssociator, inputClusterListsForAssociator, mcListName);
-	    addStructuralLinkerWithPlots(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1, inputHitListsForAssociator, inputClusterListsForAssociator, mcListName);
+	    boolean cheatOnLikelihood = false;
+	    if (cheatOnLikelihood) {
+		addCheatingStructuralLinker(prefix, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1, inputHitListsForAssociator, inputClusterListsForAssociator, mcListName);
+	    } else {
+		boolean makeExtraLikelihoodPlots = false;
+		if (makeExtraLikelihoodPlots) {
+		    addStructuralLinkerWithPlots(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1, inputHitListsForAssociator, inputClusterListsForAssociator, mcListName);
+		} else {
+		    addStructuralLinker(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1);
+		}
+	    }
 	    // Book-keeping
 	    accountant.addListOfNamedLists( new String[] { smallClusterList, unusedHitMap1, skeletonClusterList, "photon clusters" } );
 
+	    // Check track matches are sensible; don't match multiple
+	    // tracks to the same skeleton.
+	    String splitSkeletonClusterList = "skeletons (split)";
+	    add(new CheckSkeletonsForMultipleTracks(eval, trackList, skeletonClusterList, splitSkeletonClusterList, inputMIPList, inputClumpList));
+
 	    // Add halo of nearby hits
 	    prefix = "halo: ";
- 	    addHaloAssigner(prefix, skeletonClusterList, unusedHitMap1, haloClusterList, unusedHitMap2);
+ 	    addHaloAssigner(prefix, splitSkeletonClusterList, unusedHitMap1, haloClusterList, unusedHitMap2);
 	    // Book-keeping
 	    accountant.addListOfNamedLists( new String[] { smallClusterList, unusedHitMap2, haloClusterList, "photon clusters" } );
 
@@ -327,20 +351,30 @@
 	    // Support classes to merge/handle fragments
 	    SimpleFragmentMerger fragMerge = new SimpleFragmentMerger();
 	    DropFragments fragNoMerge = new DropFragments();
-	    CheatFragmentIdentifier fragMergeCheat = setUpCheatFragmentIdentifier("cheatid: ", haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, unusedHitMap2, mcListName);
+	    CheatFragmentIdentifier fragIDCheat = setUpCheatFragmentIdentifier("cheatid: ", haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, unusedHitMap2, mcListName);
 
-	    // Now we can fork according to what calibration/options we want to use.
-	    addHadronFinders("merge/ron/ron: ", "all particles (ron calib)", fragID, fragMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, ronNeutralHadronCalib);
-	    addHadronFinders("nomerge/ron/ron: ", "all particles [no frag merge] (ron calib)", fragID, fragNoMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, ronNeutralHadronCalib);
-	    addHadronFinders("merge/ron/adhoc: ", "all particles (ron calib, adhoc calib for E/p)", fragID, fragMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, adHocCalib);
-	    addHadronFinders("nomerge/ron/adhoc: ", "all particles [no frag merge] (ron calib, adhoc calib for E/p)", fragID, fragNoMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, adHocCalib);
-	    addHadronFinders("merge/adhoc/adhoc: ", "all particles (adhoc calib)", fragID, fragMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, adHocCalib, adHocCalib);
-	    addHadronFinders("nomerge/adhoc/adhoc: ", "all particles [no frag merge] (adhoc calib)", fragID, fragNoMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, adHocCalib, adHocCalib);
+	    boolean cheatOnFragments = false;
+	    if (cheatOnFragments) {
+		addHadronFinders("merge/ron/ron: ", "all particles (ron calib)", fragIDCheat, fragMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, ronNeutralHadronCalib, false);
+	    } else {
+		addHadronFinders("merge/ron/ron: ", "all particles (ron calib)", fragID, fragMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, ronChargedHadronCalib, true);
+	    }
+
+	    // Optionally, other hadron finders with various calibrations etc:
+	    //addHadronFinders("nomerge/ron/ron: ", "all particles [no frag merge] (ron calib)", fragID, fragNoMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, ronChargedHadronCalib, false);
+	    //addHadronFinders("merge/ron/adhoc: ", "all particles (ron calib, adhoc calib for E/p)", fragID, fragMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, adHocCalib, false);
+	    //addHadronFinders("nomerge/ron/adhoc: ", "all particles [no frag merge] (ron calib, adhoc calib for E/p)", fragID, fragNoMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, ronNeutralHadronCalib, adHocCalib, false);
+	    //addHadronFinders("merge/adhoc/adhoc: ", "all particles (adhoc calib)", fragID, fragMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, adHocCalib, adHocCalib, false);
+	    //addHadronFinders("nomerge/adhoc/adhoc: ", "all particles [no frag merge] (adhoc calib)", fragID, fragNoMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, adHocCalib, adHocCalib, false);
 
 	    // Make sure lists are accessible to Ron using old names.
 	    add(new ListFilterDriver(new DummyDecisionMakerSingle<ReconstructedParticle>(), "merge/ron/ron: neutral hadron particles", "neutral hadron particles (ron calib)", ReconstructedParticle.class));
 	    add(new ListFilterDriver(new DummyDecisionMakerSingle<ReconstructedParticle>(), "merge/ron/ron: charged hadron particles after fragment handling", "charged hadron particles 2", ReconstructedParticle.class));
 
+	    // Now try to be a bit smarter. We can try to handle the cases where >1 track is connected to a cluster.
+	    // Currently buggy -- not stable enough to use in production yet.
+	    //add(new HandleMultiTrackClusters("all particles (ron calib)", "all particles (ron calib) -- one track per cluster" ));
+
 	    // Book-keeping
 	    accountant.addListOfNamedLists( new String[] { smallPhotonParticleList, largePhotonParticleList, "merge/ron/ron: neutral hadron particles", "merge/ron/ron: charged hadron particles after fragment handling" } );
 	}
@@ -358,17 +392,6 @@
             }
         }
 
-	// This is a clumsy way of doing a veto.
-	// Require >= 84 GeV in barrel:
-        //if (checkEnergyBarrel(event, 0.923)) {
-	//super.process(event);
-	//}
-
-	// Require in barrel (i.e. primary qqbar pair has
-	// |cos(theta)| < 1/sqrt(2)):
-	//if (angleCheckLei(event, 0.707106781)) {
-	//    super.process(event);
-	//}
         super.process(event);
     }
 
@@ -376,81 +399,6 @@
 	super.suspend();
     }
 
-    protected boolean angleCheckLei(EventHeader event, double threshold) 
-    {
-	// Require that the primary qqbar pair has |cos(theta)| < threshold
-	// where theta is the polar angle, with theta=0 being the beam direction.
-	// Lei suggests a cut of |cos(theta)| < sqrt(0.5)
-	MCParticle mostEnergeticQuark = null;
-	MCParticle mostEnergeticAntiQuark = null;
-	List<MCParticle> mcParticles = event.getMCParticles();
-	for (MCParticle part : mcParticles) {
-	    int pdg = part.getPDGID();
-	    if (pdg>0 && pdg<7) {
-		if (mostEnergeticQuark==null || part.getEnergy() > mostEnergeticQuark.getEnergy()) {
-		    mostEnergeticQuark = part;
-		}
-	    } else if (pdg<0 && pdg>-7) {
-		if (mostEnergeticAntiQuark==null || part.getEnergy() > mostEnergeticAntiQuark.getEnergy()) {
-		    mostEnergeticAntiQuark = part;
-		}
-	    }
-	}
-	if (mostEnergeticQuark==null || mostEnergeticAntiQuark==null) {
-	    System.out.println("No quark/antiquark!");
-	    System.out.println("mostEnergeticQuark="+mostEnergeticQuark);
-	    System.out.println("mostEnergeticAntiQuark="+mostEnergeticAntiQuark);
-	    System.out.println("failing this event...");
-	    return false;
-	} else {
-	    double cosThetaQuark = mostEnergeticQuark.getMomentum().z() / mostEnergeticQuark.getMomentum().magnitude();
-	    double cosThetaAntiQuark = mostEnergeticAntiQuark.getMomentum().z() / mostEnergeticAntiQuark.getMomentum().magnitude();
-	    boolean eventCutPassedQuark = (Math.abs(cosThetaQuark) < threshold);
-	    boolean eventCutPassedAntiQuark = (Math.abs(cosThetaAntiQuark) < threshold);
-	    if (eventCutPassedQuark != eventCutPassedAntiQuark) {
-		// One passed, one failed. Check in case it's borderline -- could be numerical precision or similar
-		if (Math.abs(cosThetaQuark) < threshold*1.01 && Math.abs(cosThetaAntiQuark) < threshold*1.01) {
-		    // OK, close enough
-		    return true;
-		} else {
-		    System.out.println("Quark-antiquark ambiguity!");
-		    System.out.println("mostEnergeticQuark has pdg="+mostEnergeticQuark.getPDGID()+" and energy "+mostEnergeticQuark.getEnergy()+" and momentum=("+mostEnergeticQuark.getMomentum().x()+","+mostEnergeticQuark.getMomentum().y()+","+mostEnergeticQuark.getMomentum().z()+") and cosTheta="+cosThetaQuark);
-		    System.out.println("mostEnergeticAntiQuark pdg="+mostEnergeticAntiQuark.getPDGID()+" and energy "+mostEnergeticAntiQuark.getEnergy()+" and momentum=("+mostEnergeticAntiQuark.getMomentum().x()+","+mostEnergeticAntiQuark.getMomentum().y()+","+mostEnergeticAntiQuark.getMomentum().z()+") and cosTheta="+cosThetaQuark);
-		    System.out.println("failing this event...");
-		    return false;
-		}
-	    } else {
-		return eventCutPassedQuark;
-	    }
-	}
-    }
-
-    protected boolean checkEnergyBarrel(EventHeader event, double threshold) 
-    {
-        // Require that threshold (e.g. 90%) of the final-state particles are within the barrel ( cos(theta) < 0.8 )    
-        double energySumBarrel = 0.0;
-        double energySumNonBarrel = 0.0;
-        List<MCParticle> mcps = event.getMCParticles();
-        //List<MCParticle> mcps = event.get(MCParticle.class, "GenFinalStateParticles");
-        for (MCParticle mcp : mcps) {
-            if (mcp.getGeneratorStatus() == mcp.FINAL_STATE) {
-                Hep3Vector momentum = mcp.getMomentum();
-                double cosTheta = momentum.z() / momentum.magnitude();
-                if (Math.abs(cosTheta) < 0.8) {
-                    // barrel
-                    energySumBarrel += mcp.getEnergy();
-                } else {
-                    // non-barrel
-                    energySumNonBarrel += mcp.getEnergy();
-                }
-            }
-        }
-        double energySumTotal = energySumBarrel + energySumNonBarrel;
-	//System.out.println("DEBUG: Energy in barrel is "+energySumBarrel+"/"+energySumTotal+" = "+(energySumBarrel/energySumTotal));
-        return (energySumBarrel / energySumTotal > threshold);
-    }
-
-
     protected void makeEventInfoList(LikelihoodEvaluator eval) 
     {
         // Handle things that have per-event info:
@@ -469,6 +417,20 @@
     List<StructuralLikelihoodQuantityWithEventInfo> m_perEventQuantities = null;
 
 
+    protected void addCheatingPhotonFinder(String prefix, String inputHitMap, String inputTrackList, String outputPhotonClusterList, String outputHitMap, String mcListName)
+    {
+	System.out.println("WARNING: Cheating on photon finder");
+	// This cheats!
+        add(new ListFilterDriver(new ParticlePDGDecision(22), mcListName, prefix+"MCParticles photons only"));
+        org.lcsim.recon.cluster.cheat.PerfectClusterer myCheatPhotonFinder = new org.lcsim.recon.cluster.cheat.PerfectClusterer();
+        myCheatPhotonFinder.setInputHitMap(inputHitMap);
+        myCheatPhotonFinder.setOutputHitMap(outputHitMap);
+        myCheatPhotonFinder.setOutputClusterList(outputPhotonClusterList);
+        myCheatPhotonFinder.setMCParticleList(prefix+"MCParticles photons only");
+        myCheatPhotonFinder.allowHitSharing(false);
+        add(myCheatPhotonFinder);
+    }
+
     /**
      * Finding photons and MIPs needs to be done carefully, since
      * many photons look like MIPs and vice-versa.
@@ -486,7 +448,7 @@
 	// Check if they match a track (ugly... don't need to make the particles!)
 	{
 	    MIPChargedParticleMaker mipHadID = new MIPChargedParticleMaker();
-	    SimpleTrackMIPClusterMatcher mipMatch = new SimpleTrackMIPClusterMatcher();
+	    LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher();
 	    add(mipMatch);
 	    mipHadID.setTrackMatcher(mipMatch);
 	    mipHadID.setInputTrackList(inputTrackList);
@@ -541,7 +503,7 @@
 	    photonFinder.setOutputClusterList(prefix+"photon clusters (unfiltered)");
 	    add(photonFinder);
 	    //photonFinder.setDebug(true);
-	    //add(new DebugPrintClusterInfo("photon clusters (unfiltered)"));
+	    //add(new DebugPrintClusterInfo(prefix+"photon clusters (unfiltered)"));
 	}
 
 	// ... except some of those "photons" are the start of charged showers,
@@ -549,15 +511,15 @@
 	{
 	    // Check if any "photons" have a track match:
 	    SimpleChargedParticleMaker hadID = new SimpleChargedParticleMaker();
-	    SimpleTrackClusterMatcher clusMatch = new SimpleTrackClusterMatcher();
+	    LocalHelixExtrapolationTrackClusterMatcher clusMatch = new LocalHelixExtrapolationTrackClusterMatcher(); // New track matching
 	    add(clusMatch);
 	    hadID.setTrackMatcher(clusMatch);
 	    hadID.setInputTrackList(inputTrackList);
 	    hadID.setOutputTrackList(prefix+"tracks left over from front-side photon-like showers");
 	    hadID.setInputClusterList(prefix+"photon clusters (unfiltered)");
 	    hadID.setOutputParticleList(prefix+"front-side photon-like charged particles");
-	    //hadID.setDebug(true);
-	    //clusMatch.setDebug(true);
+	    hadID.setDebug(false);
+	    clusMatch.setDebug(false);
 	    add(hadID);
 	    // If so, remove them from the photon list:
 	    ClusterListFilterDriver filterRemoveChargedClusters = new ClusterListFilterDriver();
@@ -713,6 +675,7 @@
     
     protected void addCheatingStructuralLinker(String prefix, String inputClusterList, String inputMIPList, String inputClumpList, String outputClusterList, String outputHitMap, String[] inputHitListsForAssociator, String[] inputClusterListsForAssociator, String mcListName)
     {
+	System.out.println("WARNING: Cheating on likelihood");
 	CheatLikelihoodLinkDriver likelihoodLinker = new CheatLikelihoodLinkDriver(inputClusterList, inputMIPList, inputClumpList, outputClusterList, outputHitMap);
 	likelihoodLinker.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, prefix+"AssocInfo particles -> components", prefix+"AssocInfo components -> particles" );
 	likelihoodLinker.setIgnoreClusterDecision(new ClusterSizeDecision(10));
@@ -728,7 +691,7 @@
     {
 	// First try the MIPs...
 	MIPChargedParticleMaker hadIDmip = new MIPChargedParticleMaker();
-	SimpleTrackMIPClusterMatcher mipMatch = new SimpleTrackMIPClusterMatcher();
+	LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher();
 	add(mipMatch);
 	hadIDmip.setTrackMatcher(mipMatch);
 	hadIDmip.setInputTrackList(trackList);
@@ -742,20 +705,24 @@
 	}
 	hadIDmip.setCheckEoverP(checkEoverP);
 	hadIDmip.setCalibration(calibration);
-	mipMatch.setDebug(debug);
+	hadIDmip.setDebug(debug);
 	add(hadIDmip);
 
 	// Then try the clusters generically:
 	SimpleChargedParticleMaker hadID = new SimpleChargedParticleMaker();
-	SimpleTrackClusterMatcher clusMatch = new SimpleTrackClusterMatcher();
+	LocalHelixExtrapolationTrackClusterMatcher clusMatch = new LocalHelixExtrapolationTrackClusterMatcher();
 	add(clusMatch);
 	hadID.setTrackMatcher(clusMatch);
 	hadID.setInputTrackList(prefix+"tracks minus mip associations");
 	hadID.setOutputTrackList(prefix+"leftover tracks");
 	hadID.setInputClusterList(prefix+"skeletons plus halo minus charged particles from mips");
 	hadID.setOutputParticleList(prefix+"charged hadron particles with non-mip association");
-	clusMatch.setCheckEoverP(checkEoverP);
-	clusMatch.setCalibration(calibration);
+	if (checkEoverP) {
+	    CheckEoverP check = new CheckEoverP(calibration,3.0);
+	    check.setDebug(debug);
+	    clusMatch.setExtraCheck(check);
+	}
+	hadID.setDebug(debug);
 	add(hadID);
 
 	// Merge the two particle lists:
@@ -876,7 +843,8 @@
 				    String smallPhotonParticleList,
 				    String largePhotonParticleList,
 				    ClusterEnergyCalculator calibration,
-				    ClusterEnergyCalculator calibrationForEoverP)
+				    ClusterEnergyCalculator calibrationForEoverP,
+				    boolean trackDebug)
     {
 	// Fragment handling
 	String clusterList = prefix+"clusters after fragment handling";
@@ -885,7 +853,8 @@
 
 	// Repeat the hadron ID step with the revised cluster list after fragment handling
 	String tempChargedParticleList = prefix+"charged hadron particles after fragment handling";
-	addTrackMatcher(prefix+"FindCharged: ", trackList, inputMIPList, clusterList, null, tempChargedParticleList,   true, calibrationForEoverP, false);
+	boolean applyEoverPcut = true;
+	addTrackMatcher(prefix+"FindCharged: ", trackList, inputMIPList, clusterList, null, tempChargedParticleList,   applyEoverPcut, calibrationForEoverP, trackDebug);
 	System.out.println(prefix+": Will write out charged particles as '"+tempChargedParticleList+"'");
 
 	// ... and then any remaining clusters should be neutral
CVSspam 0.2.8