Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
NonTrivialPFA.java  +27 -13  1.7 -> 1.8
MJC: Miscellaneous small updates and fixes. Likelihood cuts tightened -- should improve purity of reconstructed particles
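
For context, the main functional change is tightening the structural-linker likelihood cuts from 0.5/0.5/0.5 to 0.7/0.7/1.0 (the cutTrackTrack, cutTrackClump, and cutClumpClump arguments). A minimal sketch of how those cuts are wired into the linker, using only calls visible in the diff below; the list names are illustrative rather than the exact names used in NonTrivialPFA:

    // Sketch only: load the trained likelihood tables and build the linker
    // with the tightened cuts introduced in this commit.
    LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
    eval.setDebug(false);        // the per-link debug printout is very verbose

    double cutTrackTrack = 0.7;  // was 0.5
    double cutTrackClump = 0.7;  // was 0.5
    double cutClumpClump = 1.0;  // was 0.5
    LikelihoodLinkDriver linker = new LikelihoodLinkDriver(eval,
            cutTrackTrack, cutTrackClump, cutClumpClump,
            "mst clusters linked (>=10 hits)", "mips", "clumps",  // illustrative list names
            "skeleton clusters", "unused hit map");
    linker.setIgnoreClusterDecision(new ClusterSizeDecision(10)); // size-based ignore decision, threshold 10 hits
    add(linker);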

lcsim/src/org/lcsim/contrib/uiowa
NonTrivialPFA.java 1.7 -> 1.8
diff -u -r1.7 -r1.8
--- NonTrivialPFA.java	24 Apr 2007 01:14:46 -0000	1.7
+++ NonTrivialPFA.java	26 Apr 2007 17:24:48 -0000	1.8
@@ -177,6 +177,7 @@
             }
 	    
 	    // Choose which track list to use
+	    //trackList = cheatingTrackList;
 	    trackList = nonCheatingTrackList;
 	}
 
@@ -213,7 +214,7 @@
 	    addMIPFinder(prefix, inputHitMapECAL, outputMIPListECAL, outputHitMapECAL); 
 	    addMIPFinder(prefix, inputHitMapHCAL, outputMIPListHCAL, outputHitMapHCAL);
 	    // Merge ECAL & HCAL lists
-	    ListAddDriver<Cluster> mergeMIPs = new ListAddDriver<Cluster>();
+	    ListAddDriver<Cluster> mergeMIPs = new ListAddDriver<Cluster>(Cluster.class);
 	    mergeMIPs.addInputList(outputMIPListECAL);
 	    mergeMIPs.addInputList(outputMIPListHCAL);
 	    mergeMIPs.setOutputList(outputMIPList);
@@ -236,7 +237,7 @@
 	    addClumpFinder(prefix, inputHitMapECAL, outputClumpListECAL, outputHitMapECAL);
 	    addClumpFinder(prefix, inputHitMapHCAL, outputClumpListHCAL, outputHitMapHCAL);
 	    // Merge ECAL & HCAL lists
-	    ListAddDriver<Cluster> mergeClumps = new ListAddDriver<Cluster>();
+	    ListAddDriver<Cluster> mergeClumps = new ListAddDriver<Cluster>(Cluster.class);
 	    mergeClumps.addInputList(outputClumpListECAL);
 	    mergeClumps.addInputList(outputClumpListHCAL);
 	    mergeClumps.setOutputList(outputClumpList);
@@ -278,6 +279,7 @@
 	    LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
 	    // Some likelihood quantities need per-event info:
 	    makeEventInfoList(eval);
+	    eval.setDebug(false); // lots of debug printout if enabled!
 
 	    // Link MIPs, clumps
 	    String inputLargeClusterList = "mst clusters linked (>=10 hits)";
@@ -291,15 +293,17 @@
 	    String prefix = "linker: ";
 	    String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
 	    String[] inputClusterListsForAssociator = {"mips", "clumps"};
-	    addStructuralLinker(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1);
+	    //addStructuralLinker(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1);
 	    //addCheatingStructuralLinker(prefix, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1, inputHitListsForAssociator, inputClusterListsForAssociator, mcListName);
-	    //addStructuralLinkerWithPlots(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1, inputHitListsForAssociator, inputClusterListsForAssociator, mcListName);
+	    addStructuralLinkerWithPlots(prefix, eval, inputLargeClusterList, inputMIPList, inputClumpList, skeletonClusterList, unusedHitMap1, inputHitListsForAssociator, inputClusterListsForAssociator, mcListName);
 	    // Book-keeping
 	    accountant.addListOfNamedLists( new String[] { smallClusterList, unusedHitMap1, skeletonClusterList, "photon clusters" } );
 
 	    // Add halo of nearby hits
 	    prefix = "halo: ";
  	    addHaloAssigner(prefix, skeletonClusterList, unusedHitMap1, haloClusterList, unusedHitMap2);
+	    // Book-keeping
+	    accountant.addListOfNamedLists( new String[] { smallClusterList, unusedHitMap2, haloClusterList, "photon clusters" } );
 
 	    // Match to tracks
 	    String tempParticleList = "charged hadron particles";
@@ -309,13 +313,16 @@
 	    // Look for teeny clusters near front of ECAL (soft photons)
 	    prefix = "smallphotonfinder: ";
 	    String smallPhotonParticleList = "small photons (ron calib)";
-	    String haloMinusPhotonClusterList = "skeletons plus halo minus small photons";
+	    String haloMinusPhotonClusterList = "halo minus small photons";
 	    String smallClusterMinusPhotonClusterList = "mst clusters linked (<10 hits) minus small photons";
 	    String smallPhotonClusterList = "small photon clusters";
 	    SimpleFragmentIdentifier fragID = new SimpleFragmentIdentifier(10, 100.0);
 	    fragID.addParticleList(tempParticleList);
 	    addSmallPhotonClusterFinder(prefix, fragID, haloClusterList, smallClusterList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, smallPhotonClusterList);
             addSmallPhotonParticleFinder(prefix+"ron: ",smallPhotonClusterList,smallPhotonParticleList,ronPhotonCalib);
+	    // Book-keeping
+	    accountant.addListOfNamedLists( new String[] { smallClusterMinusPhotonClusterList, unusedHitMap2, haloMinusPhotonClusterList, smallPhotonClusterList, "photon clusters" } );
+	    accountant.addListOfNamedLists( new String[] { smallClusterMinusPhotonClusterList, unusedHitMap2, haloMinusPhotonClusterList, smallPhotonParticleList, "photon clusters" } );
 
 	    // Support classes to merge/handle fragments
 	    SimpleFragmentMerger fragMerge = new SimpleFragmentMerger();
@@ -331,8 +338,11 @@
 	    addHadronFinders("nomerge/adhoc/adhoc: ", "all particles [no frag merge] (adhoc calib)", fragID, fragNoMerge, inputMIPList, haloMinusPhotonClusterList, smallClusterMinusPhotonClusterList, trackList, smallPhotonParticleList, largePhotonParticleList, adHocCalib, adHocCalib);
 
 	    // Make sure lists are accessible to Ron using old names.
-	    add(new ListFilterDriver(new DummyDecisionMakerSingle<ReconstructedParticle>(), "merge/ron/ron: neutral hadron particles", "neutral hadron particles (ron calib)"));
-	    add(new ListFilterDriver(new DummyDecisionMakerSingle<ReconstructedParticle>(), "merge/ron/ron: charged hadron particles after fragment handling", "charged hadron particles 2"));
+	    add(new ListFilterDriver(new DummyDecisionMakerSingle<ReconstructedParticle>(), "merge/ron/ron: neutral hadron particles", "neutral hadron particles (ron calib)", ReconstructedParticle.class));
+	    add(new ListFilterDriver(new DummyDecisionMakerSingle<ReconstructedParticle>(), "merge/ron/ron: charged hadron particles after fragment handling", "charged hadron particles 2", ReconstructedParticle.class));
+
+	    // Book-keeping
+	    accountant.addListOfNamedLists( new String[] { smallPhotonParticleList, largePhotonParticleList, "merge/ron/ron: neutral hadron particles", "merge/ron/ron: charged hadron particles after fragment handling" } );
 	}
 
 	// Make sure the book-keeping is OK:
@@ -683,17 +693,21 @@
 
     protected void addStructuralLinker(String prefix, LikelihoodEvaluator eval, String inputClusterList, String inputMIPList, String inputClumpList, String outputClusterList, String outputHitMap)
     {
-	LikelihoodLinkDriver likelihoodLinker = new LikelihoodLinkDriver(eval, 0.5, 0.5, 0.5, inputClusterList, inputMIPList, inputClumpList, outputClusterList, outputHitMap);
+	double cutTrackTrack = 0.7;
+	double cutTrackClump = 0.7;
+	double cutClumpClump = 1.0;
+	LikelihoodLinkDriver likelihoodLinker = new LikelihoodLinkDriver(eval, cutTrackTrack, cutTrackClump, cutClumpClump, inputClusterList, inputMIPList, inputClumpList, outputClusterList, outputHitMap);
 	likelihoodLinker.setIgnoreClusterDecision(new ClusterSizeDecision(10));
 	add(likelihoodLinker);
     }	
 
     protected void addStructuralLinkerWithPlots(String prefix, LikelihoodEvaluator eval, String inputClusterList, String inputMIPList, String inputClumpList, String outputClusterList, String outputHitMap, String[] inputHitListsForAssociator, String[] inputClusterListsForAssociator, String mcListName)
     {
-        LikelihoodLinkPlotDriver plotter = new LikelihoodLinkPlotDriver(eval, 0.5, 0.5, 0.5, inputClusterList, inputMIPList, inputClumpList, outputClusterList, outputHitMap);
+        LikelihoodLinkPlotDriver plotter = new LikelihoodLinkPlotDriver(eval, 0.7, 0.7, 1.0, inputClusterList, inputMIPList, inputClumpList, outputClusterList, outputHitMap);
 	plotter.initPlots("likelihood.aida");
 	plotter.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, prefix+"AssocInfo particles -> components", prefix+"AssocInfo components -> particles" );
 	plotter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
+	plotter.setDebug(false);
 	add(plotter);
     }
     
@@ -745,7 +759,7 @@
 	add(hadID);
 
 	// Merge the two particle lists:
-	ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>();
+	ListAddDriver<ReconstructedParticle> mergeParticles = new ListAddDriver<ReconstructedParticle>(ReconstructedParticle.class);
 	mergeParticles.addInputList(prefix+"charged hadron particles with mip association");
 	mergeParticles.addInputList(prefix+"charged hadron particles with non-mip association");
 	mergeParticles.setOutputList(outputParticleList);
@@ -775,10 +789,10 @@
 	isSmallPhotonFilter2.setInputList(smallClusterList);
 	isSmallPhotonFilter1.setOutputList(prefix+"small photon clusters 1");
 	isSmallPhotonFilter2.setOutputList(prefix+"small photon clusters 2");
-	isSmallPhotonFilter1.setOutputClusterListFail(smallClusterMinusPhotonClusterList);
-	isSmallPhotonFilter2.setOutputClusterListFail(haloMinusPhotonClusterList);
+	isSmallPhotonFilter1.setOutputClusterListFail(haloMinusPhotonClusterList);
+	isSmallPhotonFilter2.setOutputClusterListFail(smallClusterMinusPhotonClusterList);
 
-	ListAddDriver<Cluster> mergeSmallPhotonClusters = new ListAddDriver<Cluster>();
+	ListAddDriver<Cluster> mergeSmallPhotonClusters = new ListAddDriver<Cluster>(Cluster.class);
 	mergeSmallPhotonClusters.addInputList(prefix+"small photon clusters 1");
 	mergeSmallPhotonClusters.addInputList(prefix+"small photon clusters 2");
 	mergeSmallPhotonClusters.setOutputList(smallPhotonClusterList);
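
One recurring mechanical change in this revision is that the list-manipulation drivers now take an explicit element-type token (Cluster.class, ReconstructedParticle.class). A minimal sketch of the updated usage, with illustrative input list names; the rationale in the first comment is an assumption, not something documented in the diff:

    // ListAddDriver now takes the element class, presumably so the driver can
    // register a correctly typed output collection in the event.
    ListAddDriver<Cluster> merge = new ListAddDriver<Cluster>(Cluster.class);
    merge.addInputList("mips ECAL");   // illustrative input list names
    merge.addInputList("mips HCAL");
    merge.setOutputList("mips");
    add(merge);

    // ListFilterDriver likewise takes the class as its final argument; paired with
    // DummyDecisionMakerSingle it passes every entry through, republishing the list
    // under a new name (per the "accessible to Ron using old names" comment above).
    add(new ListFilterDriver(new DummyDecisionMakerSingle<ReconstructedParticle>(),
            "merge/ron/ron: neutral hadron particles",
            "neutral hadron particles (ron calib)",
            ReconstructedParticle.class));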