lcsim/src/org/lcsim/recon/pfa/structural
diff -u -r1.25 -r1.26
--- ReclusterDTreeDriver.java 19 Oct 2009 16:22:13 -0000 1.25
+++ ReclusterDTreeDriver.java 22 Jan 2010 13:56:32 -0000 1.26
@@ -37,20 +37,21 @@
* in this package, which uses the implementation in
* org.lcsim.recon.cluster.directedtree developed by NIU).
*
- * @version $Id: ReclusterDTreeDriver.java,v 1.25 2009/10/19 16:22:13 tjkim Exp $
+ * @version $Id: ReclusterDTreeDriver.java,v 1.26 2010/01/22 13:56:32 pahl Exp $
* @author Mat Charles <[log in to unmask]>
*/
public class ReclusterDTreeDriver extends ReclusterDriver {
-
protected String m_dTreeClusterListName;
protected String m_muonTrackClusterMapName;
+ protected double m_impactParameterCut2nd= 960.; // cut from tune
+
protected boolean m_cheatOnPhotonsMisidentifiedAsNeutralHadrons = false;
protected boolean m_cheatOnHadronsMisidentifiedAsPhotons = false;
protected boolean m_cheatOnPhotons = false;
- protected boolean m_muonDebug = true;
+ protected boolean m_muonDebug = false;
protected boolean m_electronDebug = false;
protected boolean m_photonDebug = false;
protected boolean m_photonSplitDebug = false;
@@ -89,6 +90,7 @@
protected boolean m_fixSingleTracksWithCone = true;
protected boolean m_fixJetsWithCone = true;
+
protected boolean m_useSimpleConeForReassignment = false;
protected double m_minScoreForReassignment = 0.7;
@@ -128,6 +130,10 @@
public void setMuonTrackClusterMap(String muonTrackClusterMap) { m_muonTrackClusterMapName = muonTrackClusterMap;}
public void setOutputParticleListName(String outputParticleListName) { m_outputParticleListName = outputParticleListName;}
+ public void setImpactParameterCut2nd(double impactParameterCut2nd) { m_impactParameterCut2nd = impactParameterCut2nd; }
+ public void setDebug(boolean debug) { m_debug = debug; }
+
public void writeExtraEventOutput(boolean writeExtra) {
m_writeExtraEventOutput = writeExtra;
}
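
For context, a minimal standalone sketch of what the new, steerable 2nd-pass cut expresses (semantics reconstructed from the hunks below; the class and method names here are illustrative, not part of the driver):

    // Only an upper cut is applied: a candidate cluster is vetoed when its DOCA to the
    // track's shower point exceeds the tuned threshold (960. by default, per the diff).
    class SecondPassImpactCutSketch {
        private double impactParameterCut2nd = 960.;   // tuned default from the diff

        void setImpactParameterCut2nd(double cut) { impactParameterCut2nd = cut; }

        boolean vetoes(double impactParameter) {
            return impactParameter > impactParameterCut2nd;
        }
    }
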
@@ -149,6 +155,7 @@
m_muonTrackClusterMapName = muonTrackClusterMap;
}
+ public MIPGeometryHandler geomHandler;
public void process(EventHeader event) {
super.debugProcess(event);
m_event = event;
@@ -376,7 +383,8 @@
if (m_debug) {
debugPrintTrackInfo(trackList, unmatchedTracks, tracksMatchedToClusters, tracksSortedByMomentum);
- }
+ }
+
// Prep for linking
List<Cluster> linkableClustersExcludingPhotons = new Vector<Cluster>();
@@ -407,7 +415,7 @@
BasicCluster tmpClus = new BasicCluster();
tmpClus.addHit(hit);
treeOfSharedCluster.put(tmpClus, tree);
- leftoverHitClustersToShare.add(tmpClus);
+ leftoverHitClustersToShare.add(tmpClus);
boolean hitECAL = (allHitsEcalBarrel.contains(hit) || allHitsEcalEndcap.contains(hit));
boolean hitHCAL = (allHitsHcalBarrel.contains(hit) || allHitsHcalEndcap.contains(hit));
boolean hitMCAL = (m_useMucalBarrel && allHitsMcalBarrel.contains(hit)) || (m_useMucalEndcap && allHitsMcalEndcap.contains(hit));
@@ -468,7 +476,8 @@
initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
- initPotentialLinks_Cone(seeds, linkableClustersExcludingPhotons, allHits, tracksMatchedToClusters, clustersMatchedToTracks, 0.95, 0.9);
+ // 1st cone:
+ initPotentialLinks_Cone(seeds, linkableClustersExcludingPhotons, allHits, tracksMatchedToClusters, clustersMatchedToTracks, 0.95, 0.9);
}
// Done making links. Prep & build skeletons:
@@ -654,7 +663,7 @@
mapName = "ShowerFinderMapTrackToMip";
}
Map<Track, BasicCluster> mapTrackToMIP = (Map<Track, BasicCluster>) (m_event.get(mapName));
- MIPGeometryHandler geomHandler = new LayerBasedMIPGeometryHandler(mapTrackToMIP, m_findCluster);
+ geomHandler = new LayerBasedMIPGeometryHandler(mapTrackToMIP, m_findCluster);
//MIPGeometryHandler geomHandler = new HelixTangentMIPGeometryHandler(mapTrackToMIP, m_findCluster);
if (m_useSimpleConeForReassignment) {
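
The handler is now cached on the driver rather than rebuilt as a local (note that both ReclusterDriver and ReclusterDTreeDriver declare a public geomHandler field, so the derived class shadows the base-class one). A hedged sketch of the caching pattern, with stand-in types rather than the real lcsim classes:

    import hep.physics.vec.BasicHep3Vector;
    import hep.physics.vec.Hep3Vector;

    // Illustrative stand-ins only: the point is that the handler built once during the
    // reassignment pass is kept as a field so later methods can query shower points.
    class ShowerPointLookupSketch {
        interface GeometryHandler { Hep3Vector getShowerPoint(Object track); }

        private GeometryHandler geomHandler;            // was a local, now cached per event

        void rememberHandler(GeometryHandler handler) { geomHandler = handler; }

        Hep3Vector showerPointOf(Object track) {
            // fall back to the origin, as the debug code below does when it skips the lookup
            return (geomHandler != null) ? geomHandler.getShowerPoint(track) : new BasicHep3Vector();
        }
    }
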
@@ -667,7 +676,7 @@
for (Track tr : tracksSortedByMomentum) {
// Only process tracks that aren't part of a jet:
Set<Track> jetOfTrack = mapTrackToJet.get(tr);
- if (jetOfTrack == null) {
+ if (jetOfTrack == null) {
checkTrackForReassignments(tr, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, unassignedClusters, newMapTrackToTolerance.get(tr), algorithm);
}
}
@@ -679,7 +688,8 @@
}
if (m_debug) {
- printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, photons, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
+ System.out.println("DEBUG: number of tracks: " + tracksSortedByMomentum.size());
+ printStatus(" FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, photons, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
}
if (m_tj_debug){
@@ -814,17 +824,33 @@
protected List<Cluster> reassignClustersToJet (Set<Track> jet, Collection<Cluster> initialShower, Collection<Cluster> unassignedClusters, List<SharedClusterGroup> allSharedClusters, double tolerance, ReassignClustersAlgorithm reassignAlgorithm, Map<Track, Set<Cluster>> newMapTrackToShowerComponents)
{
+
// Truth info debug
+
+ Hep3Vector showerPoint=new BasicHep3Vector();
if (m_debug) {
List<MCParticle> mcList = m_event.get(MCParticle.class, m_mcList);
System.out.println("DEBUG: Looking for clusters for the following tracks:");
for (Track tr : jet) {
- String printme = new String(" * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" with contributions from truth particles: ");
- List<MCParticle> truthList = getMCParticle(tr);
- for (MCParticle part : truthList) {
- printme += "[" + part.getPDGID() + " with p="+ part.getMomentum().magnitude()+"]";
- }
- System.out.println(printme);
+ String printme = new String(" * Cluster searching for track with p = x, y, z = "+
+ String.format("%.2f",(new BasicHep3Vector(tr.getMomentum())).magnitude())+" = "+
+ String.format("%.2f",(new BasicHep3Vector(tr.getMomentum())).x() )+", "+
+ String.format("%.2f",(new BasicHep3Vector(tr.getMomentum())).y() )+", "+
+ String.format("%.2f",(new BasicHep3Vector(tr.getMomentum())).z() ) );
+ if (tr.getTracks().size()==0) {
+ showerPoint=geomHandler.getShowerPoint(tr);
+ printme += " with shower point's x, y, z = "+
+ String.format("%.1f",showerPoint.x())+", "+
+ String.format("%.1f",showerPoint.y())+", "+
+ String.format("%.1f",showerPoint.z());
+ }
+
+ printme += " with contributions from truth particles: ";
+ List<MCParticle> truthList = getMCParticle(tr);
+ for (MCParticle part : truthList) {
+ printme += "[" + part.getPDGID() + " with p="+ String.format("%.2f",part.getMomentum().magnitude())+"]";
+ }
+ System.out.println(printme);
}
}
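
A small, hypothetical formatting helper in the spirit of this printout (not part of the driver), which keeps the repeated String.format calls in one place:

    import hep.physics.vec.Hep3Vector;

    final class VecFormat {
        private VecFormat() {}

        // e.g. pattern = "%.2f" for momenta, "%.1f" for shower-point positions, as above
        static String xyz(Hep3Vector v, String pattern) {
            return String.format(pattern, v.x()) + ", "
                 + String.format(pattern, v.y()) + ", "
                 + String.format(pattern, v.z());
        }
    }
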
@@ -855,26 +881,54 @@
if (m_debug) { System.out.println("Starting cluster loop"); }
double startingEnergy = energy(initialShower, allSharedClusters);
double trackMomentum = jetScalarMomentum(jet);
- if (m_debug) { System.out.println("original E= " + startingEnergy + " P= " + trackMomentum + " E/P= " + (startingEnergy/trackMomentum)); }
+
+ if (m_debug) { System.out.format("original E= %.2f P= %.2f E/P= %.2f\n",
+ startingEnergy, trackMomentum, startingEnergy/trackMomentum); }
+
Iterator iter = clustersSortedByAngle.entrySet().iterator();
boolean previousIterationOK = false;
+
+ boolean matching=false;
+
while(iter.hasNext()) {
// Basics
Map.Entry entry = (Map.Entry) iter.next();
Double key = (Double)entry.getKey();
Cluster value = (Cluster)entry.getValue();
- if (m_debug) {
- // Truth information check
+
+ double impactParameter=0;
+ if ( value.getCalorimeterHits().size() >= 4){
+ TrackToPointDOCA ttpdoca=new TrackToPointDOCA();
+ impactParameter=ttpdoca.evaluate(value,showerPoint);
+ } else
+ impactParameter=0; // OK, since only an upper cut is applied
+
+ double distance=Math.sqrt( (showerPoint.x()-value.getPosition()[0])*(showerPoint.x()-value.getPosition()[0])
+ +(showerPoint.y()-value.getPosition()[1])*(showerPoint.y()-value.getPosition()[1])
+ +(showerPoint.z()-value.getPosition()[2])*(showerPoint.z()-value.getPosition()[2]) );
+ if (m_debug) { // Truth information check
List<MCParticle> mcList = m_event.get(MCParticle.class, m_mcList);
- Map<MCParticle, List<SimCalorimeterHit>> truthMap = truthInListFromHitList(value.getCalorimeterHits(), mcList);
+ Map<MCParticle, List<SimCalorimeterHit>> truthMap =
+ truthInListFromHitList(value.getCalorimeterHits(), mcList);
String printme = new String("DEBUG: Cluster with "+value.getCalorimeterHits().size()+" hits at cone angle of "+key);
printme += " has contributions from:";
for (MCParticle part : truthMap.keySet()) {
List<SimCalorimeterHit> hits = truthMap.get(part);
- printme += " [" + hits.size() + " hits from " + part.getPDGID()+" with p="+part.getMomentum().magnitude()+"]";
+ printme += " [" + hits.size() + " hits from " + part.getPDGID()+" with p="+
+ String.format("%.2f",part.getMomentum().magnitude())+"]";
+
+ for (Track tr : jet) {
+ List<MCParticle> truthList = getMCParticle(tr);
+ for (MCParticle trParticle : truthList)
+ if (trParticle==part) // "true" under this minimal matching corresponds to
+ matching=true; // the usual aggressive cone-algorithm behaviour
+ }
}
System.out.println(printme);
+
+ if (matching) { System.out.println("matching!"); } else { System.out.println("not matching!"); }
}
+
// Algorithm
double energyBefore = energy(initialClustersPlusAdditions, allSharedClusters);
boolean passesEoverPbefore = testEoverP_twoSided(energyBefore, trackMomentum, tolerance);
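
The two geometric quantities computed per candidate cluster above reduce to the following (a minimal sketch grounded in the hunk; TrackToPointDOCA comes from the diff and its internals are assumed, so only the straight-line distance is spelled out):

    import hep.physics.vec.Hep3Vector;

    final class ClusterGeometrySketch {
        private ClusterGeometrySketch() {}

        // straight-line distance from the track's shower point to the cluster centroid,
        // exactly the explicit sqrt in the hunk (pos = cluster.getPosition() = {x, y, z})
        static double distanceToShowerPoint(Hep3Vector showerPoint, double[] pos) {
            double dx = showerPoint.x() - pos[0];
            double dy = showerPoint.y() - pos[1];
            double dz = showerPoint.z() - pos[2];
            return Math.sqrt(dx * dx + dy * dy + dz * dz);
        }
    }

The DOCA itself is only evaluated for clusters with at least 4 hits; leaving it at 0 for smaller clusters is harmless because only an upper cut is applied later.
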
@@ -883,35 +937,42 @@
boolean passesEoverPafter = testEoverP_twoSided(energyAfter, trackMomentum, tolerance);
boolean passesEoverPafterTight = testEoverP_twoSided(energyAfter, trackMomentum, 1.0);
boolean punchThrough = isPunchThrough(initialClustersPlusAdditions, allSharedClusters);
- if (passesEoverPafter) {
+
+ if (impactParameter > m_impactParameterCut2nd){
+ // cluster fails the 2nd-pass impact-parameter cut: veto it (branch intentionally empty)
+ }
+ else if (passesEoverPafter) {
// OK -- satisfies E/p now
- if (m_debug) { System.out.println("DEBUG: E/p now passes... E is "+energyBefore+" -> "+energyAfter); }
+ if (m_debug) { System.out.format("DEBUG: E/p now passes... E is %.2f -> %.2f\n", energyBefore, energyAfter); }
if (!passesEoverPafterTight && energyAfter > trackMomentum && previousIterationOK) {
- // ... but we're running over the safe edge and we were OK before => stop and don't add it
+ if (m_debug)
+ System.out.println("DEBUG: E... but we're running over the safe edge and we were OK before => stop and don't add it");
previousIterationOK = true;
break;
} else {
// Add this cluster and be willing to add more
- if (m_debug) { System.out.println("DEBUG: Add this cluster and be willing to add more "+energyBefore+" -> "+energyAfter); }
- previousIterationOK = true;
+ if (m_debug)
+ System.out.format("DEBUG: Add this cluster and be willing to add more %.2f -> %.2f\n", energyBefore, energyAfter);
output.add(value);
+ previousIterationOK = true;
}
} else if (!passesEoverPafter && energyAfter < trackMomentum) {
previousIterationOK = true;
if (punchThrough) {
// Raw energy < target, but punches through so that's OK -- stop
- if (m_debug) { System.out.println("DEBUG: Punch-through now passes... E is "+energyBefore+" -> "+energyAfter); }
+ if (m_debug)
+ System.out.println("DEBUG: Punch-through now passes... E is "+energyBefore+" -> "+energyAfter);
output.add(value);
} else {
// Need more energy -- add cluster and keep going
- if (m_debug) { System.out.println("DEBUG: Need more energy "+energyBefore+" -> "+energyAfter); }
- output.add(value);
+ if (m_debug)
+ System.out.println("DEBUG: Need more energy "+energyBefore+" -> "+energyAfter);
+ output.add(value);
}
} else {
- if (m_debug) { System.out.println("DEBUG: Stopping because now E>>p...E is "+energyBefore+" -> "+energyAfter); }
+ if (m_debug) { System.out.format("DEBUG: Stopping because now E>>p...E is %.2f -> %.2f\n", energyBefore, energyAfter); }
// We over-shot -- now E is too large.
// Don't add this cluster.
- if (previousIterationOK) {
+ if (previousIterationOK) {
// We were OK before -- just don't add this cluster
break;
} else {
@@ -919,7 +980,8 @@
return null;
}
}
- if (m_debug) { System.out.println("DEBUG: Step: key= " + key + " energy= " + energyAfter + " E/P= " + (energyAfter/trackMomentum)); }
+ if (m_debug) { System.out.format("DEBUG: Step: key= %.4f energy= %.2f E/P= %.2f\n",
+ key, energyAfter, (energyAfter/trackMomentum)); }
}
if (!previousIterationOK) {
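
Putting the new veto together with the existing logic, the per-cluster decision in this loop now runs in the following order (a hedged, self-contained sketch; the enum and return-value style are illustrative, the real code mutates output and previousIterationOK in place):

    final class EoverPStepSketch {
        enum Step { VETO, ADD, STOP, GIVE_UP }

        static Step decide(double impactParameter, double cut2nd,
                           boolean passesAfter, boolean passesAfterTight,
                           double energyAfter, double trackMomentum,
                           boolean previousIterationOK) {
            if (impactParameter > cut2nd) return Step.VETO;       // new: 2nd-pass cut, checked first
            if (passesAfter) {
                // over the safe edge while we were already OK => stop without adding
                if (!passesAfterTight && energyAfter > trackMomentum && previousIterationOK) return Step.STOP;
                return Step.ADD;                                  // add and be willing to add more
            }
            if (energyAfter < trackMomentum) return Step.ADD;     // need more energy (or punch-through)
            return previousIterationOK ? Step.STOP : Step.GIVE_UP; // overshot: E is now too large
        }
    }
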
lcsim/src/org/lcsim/recon/pfa/structural
diff -u -r1.20 -r1.21
--- ReclusterDriver.java 19 Oct 2009 15:56:45 -0000 1.20
+++ ReclusterDriver.java 22 Jan 2010 13:56:32 -0000 1.21
@@ -3,6 +3,7 @@
import org.lcsim.util.swim.HelixSwimmer;
import java.util.*;
import java.io.IOException;
+import java.math.BigDecimal;
import hep.aida.*;
import hep.physics.vec.*;
import hep.physics.particle.properties.*;
@@ -38,10 +39,10 @@
* clustering has already been done. See arguments to
* the constructor for details.
*
- * This version is superseded by ReclusterDTreeDriver,
+ * This version is superseded by ReclusterDTreeDriver,
* which derives from it.
*
- * @version $Id: ReclusterDriver.java,v 1.20 2009/10/19 15:56:45 tjkim Exp $
+ * @version $Id: ReclusterDriver.java,v 1.21 2010/01/22 13:56:32 pahl Exp $
* @author Mat Charles <[log in to unmask]>
*/
@@ -54,6 +55,11 @@
m_debugLinkScores = debug;
m_debugEoverP = debug;
}
+
+ protected double m_impactParameterCut1st = 0; // default 0: effectively no cut on (distance - impact parameter)
+ public void setImpactParameterCut1st(double impactParameterCut1st) { m_impactParameterCut1st = impactParameterCut1st; }
+ protected double m_wayback1st = 800;
+ public void setWayback1st(double wayback1st) { m_wayback1st = wayback1st; }
boolean m_useOldCalibration = false;
boolean m_useAnalogHcalCalibration = false;
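
Both new parameters are used further down in this file: m_wayback1st replaces the hard-coded 800.0 handed to ConeMIPReassignmentAlgorithm (so the back-projection length becomes steerable via setWayback1st), and m_impactParameterCut1st gates the creation and replacement of potential links. A minimal sketch of the gate as it appears in the later hunks:

    final class FirstPassLinkGateSketch {
        private FirstPassLinkGateSketch() {}

        // a candidate link survives only if the cluster's DOCA to the shower point
        // undercuts its straight-line distance by at least the cut; with the default
        // cut of 0 this is, as the field's comment says, effectively no requirement
        static boolean allowLink(double impactParameter, double distance, double impactParameterCut1st) {
            return impactParameter < distance - impactParameterCut1st;
        }
    }
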
@@ -101,6 +107,8 @@
protected ReclusterDriver() {
// Gah, debug only!
}
+
+ public MIPGeometryHandler geomHandler;
public ReclusterDriver(String mcList, String trackList, String muonParticles, String photonClusters, String skeletonClusters, String smallClusters, String unusedHits, String largeClusters, String mips, String clumps, String splitSkeletonClusters, LikelihoodEvaluator eval) {
System.out.println("ReclusterDriver version 0.1 -- DEPRECATED");
@@ -723,7 +731,7 @@
}
- if (m_debug) { printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, photonClusters, mips, clumps, largeClustersWithoutSkeletons, smallClusterSeeds, newMapTrackToVetoedAdditions); }
+ if (m_debug) { printStatus(" FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, photonClusters, mips, clumps, largeClustersWithoutSkeletons, smallClusterSeeds, newMapTrackToVetoedAdditions); }
long timeAfterFinalStatusPrintout = Calendar.getInstance().getTimeInMillis(); // DEBUG
if (m_debugTiming) { System.out.println("DEBUG: Time to print out final status took "+(timeAfterFinalStatusPrintout-timeAtEndOfLastEventIteration)+" ms"); }
////////////////////////////////////////////////////////////////////////
@@ -1056,12 +1064,11 @@
// Set up the algorithm which will calculate the cone angle
ReassignClustersAlgorithm mipAlg = null;
if (m_useOldReassignmentAlgorithmForConeScore) {
- mipAlg = new PreShowerMIPReassignmentAlgorithm(m_event, 1.0, mapName);
- } else {
+ mipAlg = new PreShowerMIPReassignmentAlgorithm(m_event, 1.0, mapName);
+ } else {
Map<Track, BasicCluster> mapTrackToMIP = (Map<Track, BasicCluster>) (m_event.get(mapName));
- MIPGeometryHandler geomHandler = new LayerBasedMIPGeometryHandler(mapTrackToMIP, m_findCluster);
+ geomHandler = new LayerBasedMIPGeometryHandler(mapTrackToMIP, m_findCluster);
//MIPGeometryHandler geomHandler = new HelixTangentMIPGeometryHandler(mapTrackToMIP, m_findCluster);
- mipAlg = new ConeMIPReassignmentAlgorithm(geomHandler, 800.0, Math.PI*0.5);
+ mipAlg = new ConeMIPReassignmentAlgorithm(geomHandler, m_wayback1st, Math.PI*0.5);
}
// Loop over possible links and calculate scores
@@ -1101,12 +1108,33 @@
// Compute cone and score it
Double coneAngle = mipAlg.computeFigureOfMerit(tr, clus);
+
+ String printme = new String();
+ Hep3Vector showerPoint=new BasicHep3Vector();
+ if (tr.getTracks().size()==0) {
+ showerPoint=geomHandler.getShowerPoint(tr);
+ printme += " with shower point's x, y, z = "+
+ String.format("%.1f",showerPoint.x())+", "+
+ String.format("%.1f",showerPoint.y())+", "+
+ String.format("%.1f",showerPoint.z())+"\n";
+ }
+
+ TrackToPointDOCA ttpdoca;
if (coneAngle == null) {
// Extrapolation failed or angle way too large
// Skip this link
continue;
} else {
double coneAngleCosTheta = Math.cos(coneAngle);
+
+ double impactParameter=0;
+ if ( clus.getCalorimeterHits().size() >= 4){
+ ttpdoca=new TrackToPointDOCA();
+ impactParameter=ttpdoca.evaluate(clus,showerPoint);
+ }
+ double distance=Math.sqrt( (showerPoint.x()-clus.getPosition()[0])*(showerPoint.x()-clus.getPosition()[0])
+ +(showerPoint.y()-clus.getPosition()[1])*(showerPoint.y()-clus.getPosition()[1])
+ +(showerPoint.z()-clus.getPosition()[2])*(showerPoint.z()-clus.getPosition()[2]) );
// Compute score based on scales
// Score drops linearly from (1.0 to 0.7) over the range (1.0, scaleOK)
// Score drops linearly from (0.7 to 0.0) over the range (scaleOK, scaleMin)
@@ -1136,31 +1164,35 @@
if (newLinkValid) {
if (!linkAlreadyExists) {
// No pre-existing link
- addPotentialLink(seed, clus, score);
- } else {
+
+ if (impactParameter < distance-m_impactParameterCut1st){
+ addPotentialLink(seed, clus, score);
+ }
+ } else {
// Pre-existing link
double oldScore = previousLink.score();
if (score > oldScore) {
// New link is better!
- // Replace old one.
- List<ScoredLink> list1 = m_potentialLinks.get(seed);
- List<ScoredLink> list2 = m_potentialLinks.get(clus);
- list1.remove(previousLink);
- list2.remove(previousLink);
- // Check that it was removed OK
- boolean linkExistsAfterRemove1 = checkForLink(seed, clus);
- boolean linkExistsAfterRemove2 = checkForLink(clus, seed);
- if (linkExistsAfterRemove1) { throw new AssertionError("Book-keeping error!"); }
- if (linkExistsAfterRemove2) { throw new AssertionError("Book-keeping error!"); }
- // Add new link
- addPotentialLink(clus, seed, score);
- // Check that it was added OK
- boolean linkExistsAfterAdd1 = checkForLink(seed, clus);
- boolean linkExistsAfterAdd2 = checkForLink(clus, seed);
- if (!linkExistsAfterAdd1) { throw new AssertionError("Book-keeping error!"); }
- if (!linkExistsAfterAdd2) { throw new AssertionError("Book-keeping error!"); }
- }
- }
+
+ if (impactParameter < distance-m_impactParameterCut1st){
+ // Replace old one.
+ List<ScoredLink> list1 = m_potentialLinks.get(seed);
+ List<ScoredLink> list2 = m_potentialLinks.get(clus);
+ list1.remove(previousLink);
+ list2.remove(previousLink);
+ // Check that it was removed OK
+ boolean linkExistsAfterRemove1 = checkForLink(seed, clus);
+ boolean linkExistsAfterRemove2 = checkForLink(clus, seed);
+ if (linkExistsAfterRemove1) { throw new AssertionError("Book-keeping error!"); }
+ if (linkExistsAfterRemove2) { throw new AssertionError("Book-keeping error!"); }
+ // Add new link
+ addPotentialLink(clus, seed, score);
+ // Check that it was added OK
+ boolean linkExistsAfterAdd1 = checkForLink(seed, clus);
+ boolean linkExistsAfterAdd2 = checkForLink(clus, seed);
+ if (!linkExistsAfterAdd1) { throw new AssertionError("Book-keeping error!"); }
+ if (!linkExistsAfterAdd2) { throw new AssertionError("Book-keeping error!"); }
+ }
+ }
+ }
}
}
}
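
For orientation, a hedged sketch of the replace-if-better bookkeeping that the new gate also protects (generic stand-ins for ScoredLink and m_potentialLinks, assuming addPotentialLink registers the link on both endpoints as the removal pattern suggests; the real code additionally re-verifies the maps with checkForLink and throws AssertionError on any mismatch):

    import java.util.List;
    import java.util.Map;

    final class LinkBookkeepingSketch {
        private LinkBookkeepingSketch() {}

        // drop the old link from both endpoints' lists, then register the better-scored one
        static <L> void replaceLink(Map<Object, List<L>> potentialLinks,
                                    Object seed, Object clus, L previousLink, L newLink) {
            potentialLinks.get(seed).remove(previousLink);
            potentialLinks.get(clus).remove(previousLink);
            potentialLinks.get(seed).add(newLink);
            potentialLinks.get(clus).add(newLink);
        }
    }
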
@@ -2155,6 +2187,7 @@
}
}
}
+
protected class MomentumSort implements Comparator<Track> {
public MomentumSort() {}
public int compare(Track t1, Track t2) {
@@ -2171,6 +2204,7 @@
}
}
}
+
private class ScoredLinkSort implements Comparator<ScoredLink> {
public ScoredLinkSort() {}
public int compare(ScoredLink t1, ScoredLink t2) {