Print

Print


Commit in lcsim/src/org/lcsim/contrib/uiowa/template on MAIN
NonTrivialPFA.java  +91 -11  1.3 -> 1.4
Extended non-trivial PFA to the point where it can make energy sum plots (still uses a few external classes)

lcsim/src/org/lcsim/contrib/uiowa/template
NonTrivialPFA.java 1.3 -> 1.4
diff -u -r1.3 -r1.4
--- NonTrivialPFA.java	27 Jan 2006 23:54:01 -0000	1.3
+++ NonTrivialPFA.java	4 Feb 2006 01:25:31 -0000	1.4
@@ -15,21 +15,29 @@
 {
     public NonTrivialPFA()
     {
-	// Step 0: Set up input hit lists:
+
+	// Step 0: Run digisim
+
+	// CalHitMapDriver is needed by DigiSim
+	add(new org.lcsim.recon.cluster.util.CalHitMapDriver());
+	// DigiSim: SimCalHits -> RawCalHits
+	org.lcsim.digisim.DigiSimDriver digi = new org.lcsim.digisim.DigiSimDriver();
+	add(digi);
+	// RawCalHits -> SimCalorimeterHits
+	add( new org.lcsim.digisim.SimCalorimeterHitsDriver() );
+
+	// Step 1: Set up input hit lists:
 	HitMapDriver hitmapEcal = new HitMapDriver();
-	hitmapEcal.addInputList("EcalBarrHits");
-	hitmapEcal.addInputList("EcalEndcapHits");
+	hitmapEcal.addInputList("EcalBarrDigiHits");
+	hitmapEcal.addInputList("EcalEndcapDigiHits");
 	hitmapEcal.setOutput("input hit map ecal");
 	HitMapDriver hitmapHcal = new HitMapDriver();
-	hitmapHcal.addInputList("HcalBarrHits");
-	hitmapHcal.addInputList("HcalEndcapHits");
+	hitmapHcal.addInputList("HcalBarrDigiHits");
+	hitmapHcal.addInputList("HcalEndcapDigiHits");
 	hitmapHcal.setOutput("input hit map hcal");
 	add(hitmapEcal);
 	add(hitmapHcal);
 
-	// Step 1: Run digisim
-	// [digisim]
-
 	// Find tracks
 	// Output: List<Track> saved as EventHeader.TRACKS
 	add (new org.lcsim.mc.fast.tracking.MCFastTracking());
@@ -91,6 +99,16 @@
 	mstDriverLink.setPairDecision(new BothCalorimetersDecision());
         add(mstDriverLink);
 
+	// OK. Now we are going to weed out the small (<10 hit) clusters.
+	ClusterListFilterDriver sizeFilterDriver = new ClusterListFilterDriver();
+	sizeFilterDriver.setInputDecision(new ClusterSizeDecision(10));
+	sizeFilterDriver.setInputClusterList("mst clusters linked");
+	sizeFilterDriver.setOutputClusterListPass("mst clusters linked (>=10 hits)");
+	sizeFilterDriver.setOutputClusterListFail("mst clusters linked (<10 hits)");
+	sizeFilterDriver.setOutputHitMapPass("hits from mst clusters linked (>=10 hits)");
+	sizeFilterDriver.setOutputHitMapFail("hits from mst clusters linked (<10 hits)");
+	add(sizeFilterDriver);
+
 	boolean writeLikelihood = false;
 	structural.ClusterAssociator assoc = new structural.ClusterEnergyAssociator();
 	if (writeLikelihood) {
@@ -105,8 +123,7 @@
 	  eval.addLikelihoodQuantityClumpToClump(new ClusterToClusterMinDistance(), 20, 0.0, 200.0, false, true);
 	  // Handle things that have per-event info:
 	  makeEventInfoList(eval); 
-	  structural.LikelihoodFindingStructuralDriver likelihoodWriter = new structural.LikelihoodFindingStructuralDriver(eval, assoc, "mst clusters linked", "mips", "clumps");
-	  likelihoodWriter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
+	  structural.LikelihoodFindingStructuralDriver likelihoodWriter = new structural.LikelihoodFindingStructuralDriver(eval, assoc, "mst clusters linked (>=10 hits)", "mips", "clumps");
 	  add(likelihoodWriter);
 	  Driver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
 	  add(checkpoint);
@@ -120,7 +137,7 @@
 	  //   * List of skeleton clusters
 	  //   * Hitmap containing clustered hits that aren't in skeletons
 	  LikelihoodEvaluator eval = LikelihoodEvaluator.readFromFile("likelihood.bin");
-	  structural.LikelihoodLinkPlotterDriver likelihoodPlotter = new structural.LikelihoodLinkPlotterDriver(eval, 0.5, 0.6, 0.8, assoc, "mst clusters linked", "mips", "clumps", "skeletons", "structural unused hits");
+	  structural.LikelihoodLinkPlotterDriver likelihoodPlotter = new structural.LikelihoodLinkPlotterDriver(eval, 0.5, 0.6, 0.8, assoc, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
 	  likelihoodPlotter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
 	  likelihoodPlotter.initPlots("likelihoodPerformance.aida");
 	  add(likelihoodPlotter);
@@ -150,9 +167,59 @@
 	  hadID.setOutputParticleList("charged hadron particles");
 	  add(hadID);
 	  // Step 6d: Handle fragments
+	  // Inputs:
+	  //    * Charged particles ("charged hadron particles")
+	  //    * Skeleton clusters ("skeletons plus halo")
+	  //    * Small clusters ("mst clusters linked (<10 hits)")
+	  //    * Unassigned structural hits, if any ("structural unused hits minus halo")
+	  // Outputs:
+	  //    * Particles (one per merged cluster)
+	  //    * Clusters after merging in fragments ("merged clusters");
+	  //    * Any remaining unassigned hits("leftover hits after fragment merge")
+          FragmentMergeDriver fragMergeDriver = new FragmentMergeDriver();
+	  fragMergeDriver.addInputClusterList("skeletons plus halo");
+	  fragMergeDriver.addInputClusterList("mst clusters linked (<10 hits)");
+	  fragMergeDriver.addInputHitMap("structural unused hits minus halo");
+	  fragMergeDriver.addInputParticleList("charged hadron particles");
+	  fragMergeDriver.setOutputClusterList("clusters with fragments merged");
+	  fragMergeDriver.setOutputHitMap("hits left over after fragment merge");
+	  SimpleFragmentIdentifier fragID = new SimpleFragmentIdentifier();
+	  fragID.addParticleList("charged hadron particles");
+	  fragMergeDriver.setFragmentIdentifier(fragID);
+	  SimpleFragmentMerger fragMerge = new SimpleFragmentMerger();
+	  fragMergeDriver.setFragmentMerger(fragMerge);
+	  // specify fragment identifier, merger.
+          add(fragMergeDriver);
+	  // Repeat the hadron ID step with the revised cluster list:
+	  ChargedHadronIdentifier hadID2 = new ChargedHadronIdentifier();
+	  hadID2.setInputTrackList(EventHeader.TRACKS);
+	  hadID2.setOutputTrackList("leftover tracks 2");
+	  hadID2.setInputMIPList("mips");
+	  hadID2.setInputClusterList("clusters with fragments merged");
+	  hadID2.setOutputParticleList("charged hadron particles 2");
+	  add(hadID2);
+	  // ... and then any remaining clusters should be neutral
+	  ClusterListFilterDriver removeChargedClusters = new ClusterListFilterDriver();
+	  VetoClustersFromParticles vetoCharged = new VetoClustersFromParticles("charged hadron particles 2");
+	  removeChargedClusters.setInputDecision(vetoCharged);
+	  removeChargedClusters.setInputClusterList("clusters with fragments merged");
+	  removeChargedClusters.setOutputClusterListPass("neutral clusters with fragments merged");
+	  add(vetoCharged);
+	  add(removeChargedClusters);
+	  NeutralHadronIdentifier hadID3 = new NeutralHadronIdentifier();
+	  hadID3.setInputClusterList("neutral clusters with fragments merged");
+	  hadID3.setOutputParticleList("neutral hadron particles");
+	  add(hadID3);
+	  // Merge together all the particle lists:
+	  ListMerger<ReconstructedParticle> mergeParticles = new ListMerger<ReconstructedParticle>();
+	  mergeParticles.addInputList("charged hadron particles 2");
+	  mergeParticles.addInputList("neutral hadron particles");
+	  mergeParticles.setOutputList("hadron particles");
+	  add(mergeParticles);
 	  // Step 6e: Plots
 	}
 
+	/*
 	add(new DebugInfoHitMap("input hit map ecal"));
 	add(new DebugInfoHitMap("input hit map hcal"));
 	add(new DebugInfoHitMap("hit map ecal without mips"));
@@ -161,8 +228,11 @@
 	add(new DebugInfoHitMap("hit map hcal without mips or clumps"));
 	add(new DebugInfoHitMap("ecal hit map after mst"));
 	add(new DebugInfoHitMap("hcal hit map after mst"));
+	add(new DebugInfoHitMap("hits from mst clusters linked (>=10 hits)"));
+	add(new DebugInfoHitMap("hits from mst clusters linked (<10 hits)"));
 	add(new DebugInfoHitMap("structural unused hits"));
 	add(new DebugInfoHitMap("structural unused hits minus halo"));
+	add(new DebugInfoHitMap("hits left over after fragment merge"));
 
 	add(new DebugInfoClusterList("mips ecal"));
 	add(new DebugInfoClusterList("mips hcal"));
@@ -173,13 +243,23 @@
 	add(new DebugInfoClusterList("mst clusters ecal"));
 	add(new DebugInfoClusterList("mst clusters hcal"));
 	add(new DebugInfoClusterList("mst clusters linked"));
+	add(new DebugInfoClusterList("mst clusters linked (>=10 hits)"));
+	add(new DebugInfoClusterList("mst clusters linked (<10 hits)"));
 	add(new DebugInfoClusterList("skeletons"));
 	add(new DebugInfoClusterList("skeletons plus halo"));
+	add(new DebugInfoClusterList("clusters with fragments merged"));
+	add(new DebugInfoClusterList("neutral clusters with fragments merged"));
 
 	add(new DebugInfoTrackList(EventHeader.TRACKS));
 	add(new DebugInfoTrackList("leftover tracks"));
+	add(new DebugInfoTrackList("leftover tracks 2"));
 	
 	add(new DebugInfoParticleList("charged hadron particles"));
+	add(new DebugInfoParticleList("charged hadron particles 2"));
+	add(new DebugInfoParticleList("neutral hadron particles"));
+	*/
+	add(new DebugInfoParticleList("hadron particles"));
+	add(new EnergySumPlotter("hadron particles", "test.aida"));
     }
 
     public void process(EventHeader event) {
CVSspam 0.2.8