Print

Print


Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDTreeDriver.java  +163 -54  1.18 -> 1.19
MJC: (contrib) Add option to allow MIPs to cross ECAL/HCAL boundary (and also DTree clusters); bugfix for helix-fit-based MIP quality check; simplify book-keeping a bit

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.18 -> 1.19
diff -u -r1.18 -r1.19
--- ReclusterDTreeDriver.java	31 May 2008 01:34:18 -0000	1.18
+++ ReclusterDTreeDriver.java	5 Jun 2008 16:58:06 -0000	1.19
@@ -23,6 +23,7 @@
 import org.lcsim.geometry.Detector;
 import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
 import org.lcsim.geometry.*;
+import org.lcsim.util.decision.*;
 
 /**
   * An algorithm to recluster showers using E/p information
@@ -33,7 +34,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.18 2008/05/31 01:34:18 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.19 2008/06/05 16:58:06 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -48,8 +49,9 @@
     protected boolean m_photonDebug = false;
     protected boolean m_photonSplitDebug = false;
     protected boolean m_jetDebug = false;
-    protected boolean m_writeExtraEventOutput = true;
+    protected boolean m_writeExtraEventOutput = false;
 
+    protected boolean m_oldMipFinderCrossesTrees = true;
     protected boolean m_useNewMipFinder = true;
     protected boolean m_useOldMipFinder = true;
     protected boolean m_removePoorQualityMips = false;
@@ -77,6 +79,10 @@
     protected double m_jetScoreThreshold = 0.7; // don't hard-code
     protected double m_jetTolerance = 1.5; // don't hard-code
 
+    public void writeExtraEventOutput(boolean writeExtra) {
+	m_writeExtraEventOutput = writeExtra;
+    }
+    
     public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList) {
 	System.out.println("ReclusterDTreeDriver version 0.2");
 	initTrackMatch();
@@ -87,12 +93,24 @@
 	m_mcList = mcList;
 	m_eval = new LikelihoodEvaluatorWrapper();
 	m_outputParticleListName = "DTreeReclusteredParticles";
+
+	// Move this to a better place...
+	HitNearBarrelEndcapBoundaryDecision dec = new HitNearBarrelEndcapBoundaryDecision(6.0, 15.0, 1);
+	add(dec);
+	add(new ListFilterDriver(dec, "EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
+	add(new ListFilterDriver(dec, "HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
+	add(new ListFilterDriver(dec, "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
+	add(new ListFilterDriver(dec, "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
     }
 
     public void process(EventHeader event) {
 	super.debugProcess(event);
 	m_event = event;
 
+	if (m_oldMipFinderCrossesTrees) {
+	    m_allowComponentsToStraddleLargeClusters = true;
+	}
+
 	// Read in
 	List<Cluster> dTreeClusters = event.get(Cluster.class, m_dTreeClusterListName);
 	List<Track> trackList = event.get(Track.class, m_inputTrackList);
@@ -131,18 +149,58 @@
 	}
 
 	// Lists of subclusters
-	List<Cluster> mips = new Vector<Cluster>();
-	List<Cluster> mipsOld = new Vector<Cluster>();
-	List<Cluster> mipsNew = new Vector<Cluster>();
-	List<Cluster> clumps = new Vector<Cluster>();
-	List<Cluster> leftoverHitClusters = new Vector<Cluster>();
 	// Make maps (from subclusters to tree)
 	Map<Cluster, Cluster> treeOfMip = new HashMap<Cluster, Cluster>();
 	Map<Cluster, Cluster> treeOfClump = new HashMap<Cluster,Cluster>();
 	Map<Cluster, Cluster> treeOfLeftoverHits = new HashMap<Cluster,Cluster>();
+
+	// First, look for MIPs in whole event (using only the "oldMipFinder" since the
+	// new one can't handle whole-event combinatorics.)
+	// Start by looking up what hits are we free to cluster (photon hits excluded)
+	List<CalorimeterHit> availableHitsECAL = new Vector<CalorimeterHit>();
+	List<CalorimeterHit> availableHitsHCAL = new Vector<CalorimeterHit>();
+	for (Cluster clus : dTreeClusters_Ecal) {
+	    availableHitsECAL.addAll(clus.getCalorimeterHits());
+	}
+	for (Cluster clus : dTreeClusters_Hcal) {
+	    availableHitsHCAL.addAll(clus.getCalorimeterHits());
+	}
+	// Run the clusterer
+	Clusterer oldMipFinder = new TrackClusterDriver();
+	List<Cluster> firstPassMipsOldEcal = oldMipFinder.createClusters(availableHitsECAL);
+	List<Cluster> firstPassMipsOldHcal = oldMipFinder.createClusters(availableHitsHCAL);
+	// Check quality & back out any bad MIPs:
+	if (m_removePoorQualityMips) {
+	    removePoorQualityMips(firstPassMipsOldEcal);
+	    removePoorQualityMips(firstPassMipsOldHcal);
+	}
+	List<Cluster> firstPassMipsOld = new Vector<Cluster>();
+	firstPassMipsOld.addAll(firstPassMipsOldEcal);
+	firstPassMipsOld.addAll(firstPassMipsOldHcal);
+	Set<CalorimeterHit> firstPassMipHits = new HashSet<CalorimeterHit>();
+	for (Cluster mip : firstPassMipsOld) {
+	    firstPassMipHits.addAll(mip.getCalorimeterHits());
+	}
+
+	// Book-keeping...
+	Map<Cluster, List<Cluster>> targetsInTree = new HashMap<Cluster, List<Cluster>>();
+	Map<Cluster, Cluster> treeOfSharedCluster = new HashMap<Cluster,Cluster>();
+
+	List<Cluster> mipsOld = new Vector<Cluster>();
+	List<Cluster> mipsNew = new Vector<Cluster>();
+	List<Cluster> clumps = new Vector<Cluster>();
+	List<Cluster> leftoverHitClusters = new Vector<Cluster>();
 	List<Cluster> treesWithNoStructure = new Vector<Cluster>();
+	if (m_oldMipFinderCrossesTrees) {
+	    mipsOld.addAll(firstPassMipsOld);
+	}
 
+	// Now look for mips & clumps inside DTreeClusters
 	for (Cluster dTreeCluster : dTreeClusters) {
+	    // For book-keeping
+	    List<Cluster> targetList = new Vector<Cluster>();
+	    targetsInTree.put(dTreeCluster, targetList);
+	    // ECAL or HCAL?
 	    boolean treeInECAL = (dTreeClusters_Ecal.contains(dTreeCluster));
 	    boolean treeInHCAL = (dTreeClusters_Hcal.contains(dTreeCluster));
 	    if ( !(treeInECAL || treeInHCAL) || (treeInECAL && treeInHCAL) ) { throw new AssertionError("Book-keeping error"); }
@@ -154,64 +212,51 @@
 	    }
 	    // Prepare hitmap, initially holding all the hits in the cluster
 	    HitMap hitsInTree = new HitMap();
+	    // Look for structure
 	    List<Cluster> mipClustersOld = new Vector<Cluster>();
 	    List<Cluster> mipClustersNew = new Vector<Cluster>();
 	    List<Cluster> clumpClusters  = new Vector<Cluster>();
-
-	    findStructureInsideCluster(dTreeCluster, treeInECAL, mipClustersOld, mipClustersNew, clumpClusters, hitsInTree);
- 	    mipsOld.addAll(mipClustersOld);
+	    if (m_oldMipFinderCrossesTrees) {
+		findStructureInsideClusterSupplyingOldMips(dTreeCluster, treeInECAL, mipClustersOld, mipClustersNew, clumpClusters, hitsInTree, firstPassMipsOld);
+	    } else {
+		findStructureInsideCluster(dTreeCluster, treeInECAL, mipClustersOld, mipClustersNew, clumpClusters, hitsInTree);
+	    }
+	    // Record what we found
+	    targetList.addAll(mipClustersOld);
+	    targetList.addAll(mipClustersNew);
+	    targetList.addAll(clumpClusters);
  	    mipsNew.addAll(mipClustersNew);
- 	    mips.addAll(mipClustersOld);
- 	    mips.addAll(mipClustersNew);
  	    clumps.addAll(clumpClusters);
-	    for (Cluster mip : mipClustersOld) { treeOfMip.put(mip, dTreeCluster); }
-	    for (Cluster mip : mipClustersNew) { treeOfMip.put(mip, dTreeCluster); }
-	    for (Cluster clump : clumpClusters) { treeOfClump.put(clump, dTreeCluster); }
+	    if (m_oldMipFinderCrossesTrees) {
+		// No need to record these MIPs again -- we already
+		// made a note of them when running on the whole event
+	    } else {
+		mipsOld.addAll(mipClustersOld);
+	    }
+
 	    int numMipsInTree = mipClustersNew.size() + mipClustersOld.size();
 	    int numClumpsInTree = clumpClusters.size();
 	    if (numMipsInTree==0 && numClumpsInTree==0 && hitsInTree.size()>=minHitsToBeTreatedAsCluster) {
 		// No structure found in tree. Treat it as a block.
 		treesWithNoStructure.add(dTreeCluster);
+		targetList.add(dTreeCluster);
 	    } else {
 		// Structure found in tree. Rest are leftover hits:
 		if (hitsInTree.size() > 0) {
-		    List<CalorimeterHit> leftoverHits = new Vector<CalorimeterHit>(hitsInTree.values());
-		    if (leftoverHits.contains(null)) { throw new AssertionError("null hit in leftoverHits"); }
 		    BasicCluster leftoverHitCluster = new BasicCluster();
-		    for (CalorimeterHit hit : leftoverHits) {
+		    for (CalorimeterHit hit : hitsInTree.values()) {
+			if (hit == null) { throw new AssertionError("null hit in leftoverHits"); }
 			leftoverHitCluster.addHit(hit);
 		    }
 		    leftoverHitClusters.add(leftoverHitCluster);
-		    treeOfLeftoverHits.put(leftoverHitCluster, dTreeCluster);
+		    treeOfSharedCluster.put(leftoverHitCluster, dTreeCluster);
 		}
 	    }
 	}
-	//Map<Cluster, Cluster> treeOfComponent = new HashMap<Cluster,Cluster>();
-	//treeOfComponent.putAll(treeOfMip);
-	//treeOfComponent.putAll(treeOfClump);
-	//treeOfComponent.putAll(treeOfLeftoverHits);
-	Map<Cluster, Cluster> treeOfSharedCluster = new HashMap<Cluster,Cluster>();
-	treeOfSharedCluster.putAll(treeOfLeftoverHits);
-	Map<Cluster, List<Cluster>> targetsInTree = new HashMap<Cluster, List<Cluster>>();
-	for (Cluster target : treeOfMip.keySet()) {
-	    Cluster tree = treeOfMip.get(target);
-	    List<Cluster> tmpList = new Vector<Cluster>();
-	    tmpList.add(target);
-	    targetsInTree.put(tree, tmpList);
-	}
-	for (Cluster target : treeOfClump.keySet()) {
-	    Cluster tree = treeOfClump.get(target);
-	    List<Cluster> tmpList = new Vector<Cluster>();
-	    tmpList.add(target);
-	    targetsInTree.put(tree, tmpList);
-	}
-	for (Cluster target : treeOfLeftoverHits.keySet()) {
-	    Cluster tree = treeOfLeftoverHits.get(target);
-	    List<Cluster> tmpList = new Vector<Cluster>();
-	    tmpList.add(target);
-	    targetsInTree.put(tree, tmpList);
-	}
-	
+
+	List<Cluster> mips = new Vector<Cluster>();
+	mips.addAll(mipsOld);
+	mips.addAll(mipsNew);
 
 	if (m_debug) {
 	    System.out.println("Found "+mips.size()+" mips, "+clumps.size()+" clumps, "+photons.size()+" photons, "+leftoverHitClusters.size()+" leftover-hit-clusters, "+treesWithNoStructure.size()+" large DTrees with no structure, and "+trackList.size()+" tracks in event.");
@@ -1230,6 +1275,62 @@
 	return output;
     }
 
+    void findStructureInsideClusterSupplyingOldMips(Cluster largeCluster, boolean inECAL, List<Cluster> mipsOldInside, List<Cluster> mipsNewInside, List<Cluster> clumpsInside, HitMap unusedHits, List<Cluster> mipsOld) {
+	// Verify
+	if (mipsOldInside.size() != 0 || mipsNewInside.size() != 0 || clumpsInside.size() != 0 || unusedHits.size() != 0) {
+	    throw new AssertionError("Empty, non-null input lists required.");
+	}
+	// Book-keeping
+	for (CalorimeterHit hit : largeCluster.getCalorimeterHits()) {
+	    unusedHits.put(hit.getCellID(), hit);
+	}
+	// Look to see which old MIPs are found in this cluster:
+	for (Cluster mip : mipsOld) {
+	    Set<CalorimeterHit> overlappingHitsThisMIP = new HashSet<CalorimeterHit>();
+	    for (CalorimeterHit mipHit : mip.getCalorimeterHits()) {
+		long id = mipHit.getCellID();
+		if (unusedHits.keySet().contains(id)) {
+		    overlappingHitsThisMIP.add(mipHit);
+		    unusedHits.remove(id);
+		}
+	    }
+	    if (overlappingHitsThisMIP.size()>0) {
+		mipsOldInside.add(mip);
+	    }		    
+	}
+	// MIPs (new)
+	if (m_useNewMipFinder) {
+	    double radius = 0.0;
+	    if (inECAL) {
+		radius = m_newMipFinderRadiusECAL;
+	    } else {
+		radius = m_newMipFinderRadiusHCAL;
+	    }
+	    Clusterer newMipFinder = new org.lcsim.recon.cluster.mipfinder.NonProjectiveMipFinder(radius);
+	    List<Cluster> mipClustersNew = newMipFinder.createClusters(unusedHits);
+	    if (m_removePoorQualityMips) {
+		removePoorQualityMips(mipClustersNew);
+	    }
+	    for (Cluster mip : mipClustersNew) {
+		mipsNewInside.add(mip);
+		for (CalorimeterHit hit : mip.getCalorimeterHits()) {
+		    unusedHits.remove(hit.getCellID());
+		}
+	    }
+	}
+	// Clumps
+	Clusterer clumpfinder = new ClumpFinder();
+	List<Cluster> clumpClusters = clumpfinder.createClusters(unusedHits);	    
+	clumpsInside.addAll(clumpClusters);
+	for (Cluster clump : clumpClusters) {
+	    if (clump.getCalorimeterHits().size() == 0) { throw new AssertionError("clump has no hits"); }
+	    if (clump.getCalorimeterHits().contains(null)) { throw new AssertionError("null hit in clump"); }
+	    for (CalorimeterHit hit : clump.getCalorimeterHits()) {
+		unusedHits.remove(hit.getCellID());
+	    }
+	}
+    }
+
     void findStructureInsideCluster(Cluster largeCluster, boolean inECAL, List<Cluster> mipsOldInside, List<Cluster> mipsNewInside, List<Cluster> clumpsInside, HitMap unusedHits) {
 	// Verify
 	if (mipsOldInside.size() != 0 || mipsNewInside.size() != 0 || clumpsInside.size() != 0 || unusedHits.size() != 0) {
@@ -3083,21 +3184,30 @@
 	for (CalorimeterHit calHit : mip.getCalorimeterHits()) {
 	    double pos[] = calHit.getPosition();
 	    String subdetName = calHit.getSubdetector().getName();
-	    double err = 0.0;
+	    double err_rphi = 0.0;
+	    double err_z = 0.0;
 	    if (subdetName.compareTo("EMBarrel")==0) {
-		// Barrel -- r fixed but phi uncertain -- rphi error = cell size / sqrt(12)
-		err = 3.5 / 3.464;
+		// Barrel -- r fixed but phi uncertain 
+		//   rphi error = cell size / sqrt(12)
+		//   z error = cell size / sqrt(12)
+		err_rphi = 3.5 / 3.464;
+		err_z = 3.5 / 3.464;
 	    } else if (subdetName.compareTo("EMEndcap")==0) {
-		// Endcap -- z fixed but r and phi uncertain -- rphi error = cell size / sqrt(12) ??
-		err = 3.5 / 3.464;
+		// Endcap -- z fixed but r and phi uncertain
+		//   rphi error = cell size / sqrt(12) ??
+		//   z error = silicon width / sqrt(12)
+		err_rphi = 3.5 / 3.464;
+		err_z = 0.32 / 3.464; // 320 micron silicon
 	    } else if (subdetName.compareTo("HADBarrel")==0) {
-		err = 10.0 / 3.464;
+		err_rphi = 10.0 / 3.464;
+		err_z = 10.0 / 3.464;
 	    } else if (subdetName.compareTo("HADEndcap")==0) {
-		err = 10.0 / 3.464;
+		err_rphi = 10.0 / 3.464;
+		err_z = 3.0 / 3.464; // 1.2 mm gas OR 5.0 mm scint... pick 3mm as a compromise
 	    } else {
 		throw new AssertionError("Subdet not recognized: "+subdetName);
 	    }
-	    org.lcsim.fit.helicaltrack.HelicalTrackHit fitHit = new org.lcsim.fit.helicaltrack.HelicalTrackHit(pos[0], pos[1], pos[2], err);
+	    org.lcsim.fit.helicaltrack.HelicalTrack3DHit fitHit = new org.lcsim.fit.helicaltrack.HelicalTrack3DHit(pos[0], pos[1], pos[2], err_rphi, err_z);
 	    hitsToFit.add(fitHit);
 	}
 	// Now do the fit:
@@ -3128,7 +3238,6 @@
 			cut = 0.01; // Less stringent for long MIPs which are likely to be sound but may be multiple-scattered
 		    }
 		    boolean qualityPass = (quality >= cut);
-		    //System.out.println("DEBUG: MIP quality for "+countHits+" hits, "+countLayers+" layers, "+countDuplicates+" duplicates: "+chisq+" / "+ndf+" => "+prob+" => quality="+quality+" => passes="+qualityPass);
 		    return qualityPass;
 		} else {
 		    // Not enough points for chisq check
CVSspam 0.2.8