lcsim/src/org/lcsim/contrib/uiowa/template
diff -u -r1.7 -r1.8
--- NonTrivialPFA.java 18 Aug 2006 00:13:55 -0000 1.7
+++ NonTrivialPFA.java 24 Oct 2006 17:20:51 -0000 1.8
@@ -45,7 +45,14 @@
// I think this is needed for Ron's analysis:
private AIDA aida = AIDA.defaultInstance();
- public NonTrivialPFA()
+ // Default is to read likelihood info in and use it to
+ // reconstruct clusters. Alternative is to create new
+ // likelihood output.
+ public NonTrivialPFA() {
+ this(false);
+ }
+
+ public NonTrivialPFA(boolean writeLikelihood)
{
// Book-keeping
HitBookKeeper accountant = new HitBookKeeper();
@@ -283,7 +290,6 @@
accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "mst clusters linked (<10 hits)", "photon particles" } );
accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "hits from mst clusters linked (>=10 hits)", "hits from mst clusters linked (<10 hits)", "photon particles" } );
- boolean writeLikelihood = false;
if (writeLikelihood) {
// Step 6a: Make likelihood PDFs
LikelihoodEvaluator eval = new LikelihoodEvaluator();
@@ -301,7 +307,8 @@
String[] inputClusterListsForAssociator = {"mips", "clumps"};
likelihoodWriter.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
add(likelihoodWriter);
- Driver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
+ LikelihoodEvaluatorCheckpointDriver checkpoint = new LikelihoodEvaluatorCheckpointDriver(eval, 10);
+ checkpoint.setVerbose(true);
add(checkpoint);
} else {
@@ -317,13 +324,11 @@
// Some likelihood quantities need per-event info:
makeEventInfoList(eval);
- //structural.LikelihoodLinkPlotterDriver likelihoodPlotter = new structural.LikelihoodLinkPlotterDriver(eval, 0.5, 0.6, 0.8, assoc, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
- //likelihoodPlotter.setIgnoreClusterDecision(new ClusterSizeDecision(10));
- //likelihoodPlotter.initPlots("likelihoodPerformance.aida");
- //add(likelihoodPlotter);
-
+ // Set up linker:
org.lcsim.recon.cluster.structural.LikelihoodLinkDriver likelihoodLinker = null;
- if (true) {
+ boolean cheatOnLinker = false;
+ boolean makePlotsWhenNotCheating = false;
+ if (cheatOnLinker) {
// cheat
likelihoodLinker = new org.lcsim.recon.cluster.structural.CheatLikelihoodLinkDriver("mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
String[] inputHitListsForAssociator = {"EcalBarrDigiHits", "EcalEndcapDigiHits", "HcalBarrDigiHits", "HcalEndcapDigiHits"};
@@ -331,7 +336,7 @@
likelihoodLinker.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
} else {
// don't cheat
- if (false) {
+ if (!makePlotsWhenNotCheating) {
// no plots
likelihoodLinker = new org.lcsim.recon.cluster.structural.LikelihoodLinkDriver(eval, 0.5, 0.5, 0.5, "mst clusters linked (>=10 hits)", "mips", "clumps", "skeletons", "structural unused hits");
} else {
@@ -344,6 +349,7 @@
likelihoodLinker.initializeClusterAssociator( inputHitListsForAssociator, inputClusterListsForAssociator, mcListName, "AssocInfo particles -> components", "AssocInfo components -> particles" );
}
}
+ // finish setting up linker
likelihoodLinker.setIgnoreClusterDecision(new ClusterSizeDecision(10));
add(likelihoodLinker);
accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "skeletons", "structural unused hits", "mst clusters linked (<10 hits)", "photon particles" } );
@@ -410,8 +416,8 @@
fragMergeDriver.setFragmentIdentifier(fragID); // don't cheat
accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
- // What if we want to cheat? Set it up...
- {
+ boolean cheatOnFragmentMerge = false;
+ if (cheatOnFragmentMerge) {
// Here are our cluster lists:
// "skeletons plus halo plus orphan mips"
// "mst clusters linked (<10 hits) minus orphan mips"
@@ -431,16 +437,16 @@
String[] clusterListNames = { "skeletons plus halo", "mst clusters linked (<10 hits)" } ;
org.lcsim.recon.cluster.structural.CheatFragmentIdentifier cheatFragID = new org.lcsim.recon.cluster.structural.CheatFragmentIdentifier(hitListNames, clusterListNames, mcListName, "CheatFragmentIdentifier info P->C", "CheatFragmentIdentifier info C->P");
fragMergeDriver.setFragmentIdentifier(cheatFragID); // cheat
+ org.lcsim.recon.cluster.structural.CheatFragmentMerger cheatFragMerge = new org.lcsim.recon.cluster.structural.CheatFragmentMerger();
+ cheatFragMerge.initializeAssociator("TmpHitList", "TmpClusterList", mcListName, "TmpMCCList", "TmpCMCList");
+ fragMergeDriver.setFragmentMerger(cheatFragMerge); // cheat [broken?]
+ add(cheatFragMerge);
+ } else {
+ org.lcsim.recon.cluster.structural.SimpleFragmentMerger fragMerge = new org.lcsim.recon.cluster.structural.SimpleFragmentMerger();
+ fragMergeDriver.setFragmentMerger(fragMerge); // don't cheat
+ add(fragMergeDriver);
}
- org.lcsim.recon.cluster.structural.SimpleFragmentMerger fragMerge = new org.lcsim.recon.cluster.structural.SimpleFragmentMerger();
- fragMergeDriver.setFragmentMerger(fragMerge); // don't cheat
- org.lcsim.recon.cluster.structural.CheatFragmentMerger cheatFragMerge = new org.lcsim.recon.cluster.structural.CheatFragmentMerger();
- cheatFragMerge.initializeAssociator("TmpHitList", "TmpClusterList", mcListName, "TmpMCCList", "TmpCMCList");
- add(cheatFragMerge);
- //fragMergeDriver.setFragmentMerger(cheatFragMerge); // cheat [broken?]
- add(fragMergeDriver);
-
accountant.addListOfNamedLists( new String[] { "ecal hit map after mst", "hcal hit map after mst", "clusters with fragments merged", "hits left over after fragment merge", "photon particles" } );
// Repeat the hadron ID step with the revised cluster list:
@@ -527,8 +533,11 @@
add(adder);
// Make plots:
+
CorrectedEnergySumPlotter recoPlotter = new CorrectedEnergySumPlotter("converted all", "all particles", mcListName, "reco-corrected.aida");
+ //CorrectedEnergySumPlotter recoPlotter = new org.lcsim.recon.pfa.output.MatCalibrationPlots("converted all", "all particles", mcListName, "reco-corrected.aida");
add(recoPlotter);
+ add(new EnergySumPlotter("all particles", "reco-uncorrected.aida"));
// Make plots with Ron's ClusterAnalysis routine:
// all particles = "charged hadron particles 2" + "neutral hadron particles" + "photon particles"
@@ -576,7 +585,13 @@
// This is a clumsy way of doing a veto.
// Require >= 84 GeV in barrel:
- if (checkEnergyBarrel(event, 0.923)) {
+ //if (checkEnergyBarrel(event, 0.923)) {
+ //super.process(event);
+ //}
+
+ // Require in barrel (i.e. primary qqbar pair has
+ // |cos(theta)| < 1/sqrt(2)):
+ if (angleCheckLei(event, 0.707106781)) {
super.process(event);
}
}
@@ -591,6 +606,49 @@
super.suspend();
}
+ protected boolean angleCheckLei(EventHeader event, double threshold)
+ {
+ // Require that the primary qqbar pair has |cos(theta)| < threshold
+ // where theta is the polar angle, with theta=0 being the beam direction.
+ // Lei suggests a cut of |cos(theta)| < sqrt(0.5)
+ MCParticle mostEnergeticQuark = null;
+ MCParticle mostEnergeticAntiQuark = null;
+ List<MCParticle> mcParticles = event.getMCParticles();
+ for (MCParticle part : mcParticles) {
+ int pdg = part.getPDGID();
+ if (pdg>0 && pdg<7) {
+ if (mostEnergeticQuark==null || part.getEnergy() > mostEnergeticQuark.getEnergy()) {
+ mostEnergeticQuark = part;
+ }
+ } else if (pdg<0 && pdg>-7) {
+ if (mostEnergeticAntiQuark==null || part.getEnergy() > mostEnergeticAntiQuark.getEnergy()) {
+ mostEnergeticAntiQuark = part;
+ }
+ }
+ }
+ if (mostEnergeticQuark==null || mostEnergeticAntiQuark==null) {
+ System.out.println("No quark/antiquark!");
+ System.out.println("mostEnergeticQuark="+mostEnergeticQuark);
+ System.out.println("mostEnergeticAntiQuark="+mostEnergeticAntiQuark);
+ System.out.println("failing this event...");
+ return false;
+ } else {
+ double cosThetaQuark = mostEnergeticQuark.getMomentum().z() / mostEnergeticQuark.getMomentum().magnitude();
+ double cosThetaAntiQuark = mostEnergeticAntiQuark.getMomentum().z() / mostEnergeticAntiQuark.getMomentum().magnitude();
+ boolean eventCutPassedQuark = (Math.abs(cosThetaQuark) < threshold);
+ boolean eventCutPassedAntiQuark = (Math.abs(cosThetaAntiQuark) < threshold);
+ if (eventCutPassedQuark != eventCutPassedAntiQuark) {
+ System.out.println("Quark-antiquark ambiguity!");
+ System.out.println("mostEnergeticQuark has pdg="+mostEnergeticQuark.getPDGID()+" and energy "+mostEnergeticQuark.getEnergy()+" and momentum=("+mostEnergeticQuark.getMomentum().x()+","+mostEnergeticQuark.getMomentum().y()+","+mostEnergeticQuark.getMomentum().z()+") and cosTheta="+cosThetaQuark);
+ System.out.println("mostEnergeticAntiQuark has pdg="+mostEnergeticAntiQuark.getPDGID()+" and energy "+mostEnergeticAntiQuark.getEnergy()+" and momentum=("+mostEnergeticAntiQuark.getMomentum().x()+","+mostEnergeticAntiQuark.getMomentum().y()+","+mostEnergeticAntiQuark.getMomentum().z()+") and cosTheta="+cosThetaAntiQuark);
+ System.out.println("failing this event...");
+ return false;
+ } else {
+ return eventCutPassedQuark;
+ }
+ }
+ }
+
protected boolean checkEnergyBarrel(EventHeader event, double threshold)
{
// Require that threshold (e.g. 90%) of the final-state particles are within the barrel ( cos(theta) < 0.8 )