Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
CheckSkeletonsForMultipleTracks.java   +345   added 1.1
MJC: Module to split skeleton clusters if they connect to multiple tracks
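
For orientation, here is a minimal sketch of how the new driver might be wired into a reconstruction chain. The collection names and the LikelihoodEvaluator setup are illustrative placeholders and are not part of this commit; only the constructor signature is taken from the code below.

    import org.lcsim.contrib.uiowa.CheckSkeletonsForMultipleTracks;
    import org.lcsim.recon.cluster.structural.likelihood.LikelihoodEvaluator;
    import org.lcsim.util.Driver;

    public class ExampleSkeletonSplittingChain extends Driver {
        public ExampleSkeletonSplittingChain(LikelihoodEvaluator eval) {
            // Hypothetical collection names; the real names come from the
            // upstream track finder and the skeleton/MIP/clump builders.
            add(new CheckSkeletonsForMultipleTracks(eval,
                    "Tracks",                  // input track list
                    "SkeletonClusters",        // input skeleton clusters
                    "SplitSkeletonClusters",   // output skeleton clusters
                    "MipClusters",             // MIP segments
                    "ClumpClusters"));         // clump segments
        }
    }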

lcsim/src/org/lcsim/contrib/uiowa
CheckSkeletonsForMultipleTracks.java added at 1.1
diff -N CheckSkeletonsForMultipleTracks.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheckSkeletonsForMultipleTracks.java	16 May 2007 20:58:13 -0000	1.1
@@ -0,0 +1,345 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*; 
+import hep.physics.vec.*;
+
+import org.lcsim.util.*;
+import org.lcsim.util.decision.*;
+import org.lcsim.util.hitmap.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+import org.lcsim.recon.cluster.structural.likelihood.*;
+import org.lcsim.recon.cluster.util.BasicCluster;
+
+public class CheckSkeletonsForMultipleTracks extends Driver {
+
+    protected boolean m_debug = false;
+    protected String m_inputSkeletonClusterListName;
+    protected String m_outputSkeletonClusterListName;
+    protected String m_inputTrackListName;
+    protected String m_mipsListName;
+    protected String m_clumpsListName;
+    protected LikelihoodEvaluator m_eval = null;
+    public CheckSkeletonsForMultipleTracks(LikelihoodEvaluator eval, String inputTrackList, String inputSkeletonClusterList, String outputSkeletonClusterList, String mips, String clumps) {
+	m_eval = eval;
+	m_inputSkeletonClusterListName = inputSkeletonClusterList;
+	m_outputSkeletonClusterListName = outputSkeletonClusterList;
+	m_inputTrackListName = inputTrackList;
+	m_mipsListName = mips;
+	m_clumpsListName = clumps;
+    }
+
+    public void process(EventHeader event) {
+	// Read in lists
+	List<Cluster> inputSkeletons = event.get(Cluster.class, m_inputSkeletonClusterListName);
+	List<Track> inputTracks = event.get(Track.class, m_inputTrackListName);
+	List<Cluster> inputMIPs = event.get(Cluster.class, m_mipsListName);
+	List<Cluster> inputClumps = event.get(Cluster.class, m_clumpsListName);
+	List<Cluster> outputSkeletons = new Vector<Cluster>();
+
+	if (m_debug) { System.out.println("DEBUG: CheckSkeletonsForMultipleTracks looking at an event with "+inputSkeletons.size()+" skeletons, "+inputTracks.size()+" tracks, "+inputMIPs.size()+" MIPS, "+inputClumps.size()+" clumps"); }
+
+	// Match tracks to skeleton clusters.
+	// Cuts need to be tight, since we don't have a full set of clusters/hits
+	// and we don't want to be confused by a nearby track that shouldn't be
+	// hooked up to any of these.
+	Map<Cluster,List<Track>> map = new HashMap<Cluster,List<Track>> ();
+	LocalHelixExtrapolationTrackClusterMatcher tmpExtrap = new LocalHelixExtrapolationTrackClusterMatcher();
+	tmpExtrap.process(event);
+	tmpExtrap.setCutSeparation(14.0); // about two cells
+	for (Track tr : inputTracks) {
+	    Cluster matchedCluster = tmpExtrap.matchTrackToCluster(tr, inputSkeletons);
+	    if (map.get(matchedCluster) == null) { map.put(matchedCluster, new Vector<Track>()); }
+	    map.get(matchedCluster).add(tr);
+	}
+
+	// Loop over clusters, looking for ones which have multiple tracks.
+	for (Cluster clus : inputSkeletons) {
+	    List<Track> tracksForThisCluster = map.get(clus);
+	    if (tracksForThisCluster != null && tracksForThisCluster.size()>1) {
+		// More than one track -- need to fix it.
+
+		// Start by finding cluster components:
+		List<Cluster> vClumps = findSegmentsInList(clus, inputClumps);
+                List<Cluster> vMIPs = findSegmentsInList(clus, inputMIPs);
+		List<Cluster> vClumpsAndMIPs = new Vector<Cluster>();
+		vClumpsAndMIPs.addAll(vClumps);
+		vClumpsAndMIPs.addAll(vMIPs);
+
+		// Check for pathological case where >1 track hits the same subcluster
+		List<Link> confirmedLinks = new Vector<Link>(); // empty
+		List<Cluster> initialSkeletonList = assembleClusters(confirmedLinks, vClumpsAndMIPs);
+		boolean testInitiallyOK = checkUniqueTrackAssignments(tmpExtrap, initialSkeletonList, tracksForThisCluster);
+		while (!testInitiallyOK) { 
+		    // Pathological case. Only way out is to drop 1 (or more) tracks...
+		    if (m_debug) { System.out.println("Warning! In initial pass > 1 track already matched to a cluster segment. Trying to fix..."); }
+		    Track trackToRemove = findMultiplyAssignedTrack(tmpExtrap, initialSkeletonList, tracksForThisCluster);
+		    List<Track> newTracksForThisCluster = new Vector<Track>();
+		    newTracksForThisCluster.addAll(tracksForThisCluster);
+		    newTracksForThisCluster.remove(trackToRemove);
+		    tracksForThisCluster = newTracksForThisCluster;
+		    testInitiallyOK = checkUniqueTrackAssignments(tmpExtrap, initialSkeletonList, tracksForThisCluster);
+		}
+
+		// Compute likelihood of each pairwise link
+		Map<Link,Double> potentialTrackToTrackLinks = new HashMap<Link,Double>();
+		Map<Link,Double> potentialTrackToClumpLinks = new HashMap<Link,Double>();
+		Map<Link,Double> potentialClumpToClumpLinks = new HashMap<Link,Double>();
+		for (int i=0; i<vMIPs.size(); i++) {
+		    // First: Track-Track links, being careful not to double-count
+		    Cluster clus1 = vMIPs.get(i);
+		    for (int j=i+1; j<vMIPs.size(); j++) {
+			Cluster clus2 = vMIPs.get(j);
+			Link newLink = new Link(clus1,clus2);
+			double likelihood = m_eval.getLinkLikelihoodTrackToTrack(clus1,clus2);
+			potentialTrackToTrackLinks.put(newLink, new Double(likelihood));
+		    }
+		    // Next: Track-Clump links
+		    for (Cluster clus2 : vClumps) {
+			Link newLink = new Link(clus1,clus2);
+			double likelihood = m_eval.getLinkLikelihoodTrackToClump(clus1,clus2);
+			potentialTrackToClumpLinks.put(newLink, new Double(likelihood));
+		    }
+		}
+		for (int i=0; i<vClumps.size(); i++) {
+		    // Finally: Clump-Clump links, again being careful of double-counting
+		    Cluster clus1 = vClumps.get(i);
+		    for (int j=i+1; j<vClumps.size(); j++) {
+			Cluster clus2 = vClumps.get(j);
+			Link newLink = new Link(clus1,clus2);
+			double likelihood = m_eval.getLinkLikelihoodClumpToClump(clus1,clus2);
+			potentialClumpToClumpLinks.put(newLink, new Double(likelihood));
+		    }
+		}
+
+		// Start with very tight cuts and work backwards, adding each link
+		// as it drops below the current cut thresholds.
+		// If adding a link ever causes two tracks to be merged, mark it forbidden.
+		// Stop loosening the cuts once they reach their original level.
+		double minCutTrackTrack = 0.7;
+		double minCutTrackClump = 0.7;
+		double minCutClumpClump = 0.99;
+		double maxCutTrackTrack = 1.0;
+		double maxCutTrackClump = 1.0;
+		double maxCutClumpClump = 1.0;
+		List<Link> vetoedLinks = new Vector<Link>();
+		while (true) {
+		    // Find next link, if any
+		    double bestScaleFactor = 1.0;
+		    Link bestLink = null;
+		    for (Link l : potentialTrackToTrackLinks.keySet()) {
+			double linkLikelihood = potentialTrackToTrackLinks.get(l).doubleValue();
+			double linkScaleFactor = (linkLikelihood - minCutTrackTrack)/(maxCutTrackTrack-minCutTrackTrack);
+			if (bestLink==null || linkScaleFactor > bestScaleFactor) {
+			    bestScaleFactor = linkScaleFactor;
+			    bestLink = l;
+			}
+		    }
+		    for (Link l : potentialTrackToClumpLinks.keySet()) {
+			double linkLikelihood = potentialTrackToClumpLinks.get(l).doubleValue();
+			double linkScaleFactor = (linkLikelihood - minCutTrackClump)/(maxCutTrackClump-minCutTrackClump);
+			if (bestLink==null || linkScaleFactor > bestScaleFactor) {
+			    bestScaleFactor = linkScaleFactor;
+			    bestLink = l;
+			}
+		    }
+		    for (Link l : potentialClumpToClumpLinks.keySet()) {
+			double linkLikelihood = potentialClumpToClumpLinks.get(l).doubleValue();
+			double linkScaleFactor = (linkLikelihood - minCutClumpClump)/(maxCutClumpClump-minCutClumpClump);
+			if (bestLink==null || linkScaleFactor > bestScaleFactor) {
+			    bestScaleFactor = linkScaleFactor;
+			    bestLink = l;
+			}
+		    }
+		    if (bestScaleFactor < 0.0 || bestLink==null) {
+			// No more valid links
+			break;
+		    }
+		    // Assemble clusters with old link set (sanity check)
+		    List<Cluster> previousSkeletonList = assembleClusters(confirmedLinks, vClumpsAndMIPs);
+		    boolean testPreviousOK = checkUniqueTrackAssignments(tmpExtrap, previousSkeletonList, tracksForThisCluster);
+		    if (!testPreviousOK) { throw new AssertionError("Internal consistency failure"); }
+		    // Assemble clusters with new link added
+		    List<Link> testLinkList = new Vector<Link>();
+		    testLinkList.addAll(confirmedLinks);
+		    testLinkList.add(bestLink);
+		    List<Cluster> updatedSkeletonList = assembleClusters(testLinkList, vClumpsAndMIPs);
+		    // Verify no double tracks
+		    boolean verified = checkUniqueTrackAssignments(tmpExtrap, updatedSkeletonList, tracksForThisCluster);
+		    if (verified) {
+			confirmedLinks.add(bestLink);
+			potentialTrackToTrackLinks.remove(bestLink);
+			potentialTrackToClumpLinks.remove(bestLink);
+			potentialClumpToClumpLinks.remove(bestLink);
+		    } else {
+			vetoedLinks.add(bestLink);
+			potentialTrackToTrackLinks.remove(bestLink);
+			potentialTrackToClumpLinks.remove(bestLink);
+			potentialClumpToClumpLinks.remove(bestLink);
+		    }
+		}
+		List<Cluster> updatedSkeletonList = assembleClusters(confirmedLinks, vClumpsAndMIPs);
+
+		if (m_debug) {
+		    // Debug stuff
+		    Map<Cluster, List<Track>> mapClustersToTracks = new HashMap<Cluster, List<Track>>();
+		    for (Track tr : tracksForThisCluster) {
+			if (tr == null) { throw new AssertionError("Null track!"); }
+			Cluster matchedCluster = tmpExtrap.matchTrackToCluster(tr, updatedSkeletonList);
+			if (mapClustersToTracks.get(matchedCluster) == null) { mapClustersToTracks.put(matchedCluster, new Vector<Track>()); }
+			mapClustersToTracks.get(matchedCluster).add(tr);
+		    }
+		    System.out.println("DEBUG: Split one skeleton with "+tracksForThisCluster.size()+" tracks into "+updatedSkeletonList.size()+" clusters with "+confirmedLinks.size()+" confirmed links and "+vetoedLinks.size()+" vetoed links.");
+		    for (Cluster skel : mapClustersToTracks.keySet()) {
+			if (skel == null) { throw new AssertionError("Null cluster!"); }
+			List<Track> matchedTracks = mapClustersToTracks.get(skel);
+			if (matchedTracks.size()>1) { throw new AssertionError("Multiple tracks in skeleton!"); }
+			Track matchedTrack = matchedTracks.get(0);
+			String printme = new String("DEBUG:    New skeleton has "+skel.getCalorimeterHits().size()+" hits and matches to track with p="+(new BasicHep3Vector(matchedTrack.getMomentum())).magnitude()+".");
+			Map<MCParticle,Integer> particleHitCount = new HashMap<MCParticle,Integer>();
+			for (CalorimeterHit hit : skel.getCalorimeterHits()) {
+			    SimCalorimeterHit simHit = (SimCalorimeterHit) (hit);
+			    int nContributingParticles = simHit.getMCParticleCount();
+			    for (int i=0; i<nContributingParticles; i++) {
+				MCParticle part = simHit.getMCParticle(i);
+				if (particleHitCount.get(part) == null) { particleHitCount.put(part, new Integer(0)); }
+				Integer oldValue = particleHitCount.get(part);
+				particleHitCount.put(part, new Integer(oldValue+1));
+			    }
+			}
+			for (MCParticle part : particleHitCount.keySet()) {
+			    printme += " Contribution from "+part.getType().getName()+" (E="+part.getEnergy()+") of "+particleHitCount.get(part)+" hits.";
+			}
+			System.out.println(printme); 
+		    }
+		}
+		outputSkeletons.addAll(updatedSkeletonList);
+	    } else {
+		// Only one track or zero tracks
+		outputSkeletons.add(clus);
+	    }
+	}
+    
+	// Write out
+	event.put(m_outputSkeletonClusterListName, outputSkeletons, Cluster.class, 0);
+
+    }
+
+    List<Cluster> assembleClusters(List<Link> links, List<Cluster> segments) {
+	Set<Cluster> unassignedSegments = new HashSet<Cluster>();
+	unassignedSegments.addAll(segments); // initially full
+        List<Cluster> vLinkedClusters = new Vector<Cluster>(); // initially empty
+	while ( ! unassignedSegments.isEmpty() ) {
+	    Set<Cluster> linkedSegments = new HashSet<Cluster>();
+	    Cluster nextSegment = unassignedSegments.iterator().next();
+	    recursivelyAddSegment(nextSegment, linkedSegments, unassignedSegments, links);
+	    BasicCluster newMergedCluster = new BasicCluster();
+	    for (Cluster currentLinkedSegment : linkedSegments) {
+		newMergedCluster.addCluster(currentLinkedSegment);
+	    }
+            vLinkedClusters.add(newMergedCluster);
+	}
+	return vLinkedClusters;
+    }
+    void recursivelyAddSegment(Cluster currentSegment, Set<Cluster> linkedSegments, Set<Cluster> unassignedSegments, List<Link> links) {
+	boolean removedOK = unassignedSegments.remove(currentSegment);
+	boolean addedOK = linkedSegments.add(currentSegment);
+	if (!removedOK || !addedOK) { throw new AssertionError("Internal consistency failure"); }
+	for (Link currentLink : links) {
+	    if (currentLink.contains(currentSegment)) {
+		Cluster partner = currentLink.counterpart(currentSegment);
+		if (unassignedSegments.contains(partner)) {
+		    recursivelyAddSegment(partner, linkedSegments, unassignedSegments, links);
+		}
+	    }
+	}
+    }
+
+    boolean checkUniqueTrackAssignments(LocalHelixExtrapolationTrackClusterMatcher extrap, List<Cluster> clusters, List<Track> tracks) {
+	Track duplicateTrack = findMultiplyAssignedTrack(extrap, clusters, tracks);
+	return (duplicateTrack == null);
+    }
+
+//     boolean checkUniqueTrackAssignments(LocalHelixExtrapolationTrackClusterMatcher extrap, List<Cluster> clusters, List<Track> tracks) {
+// 	Map<Cluster, List<Track>> mapClustersToTracks = new HashMap<Cluster, List<Track>>();
+// 	for (Track tr : tracks) {
+// 	    Cluster matchedCluster = extrap.matchTrackToCluster(tr, clusters);
+// 	    if (mapClustersToTracks.get(matchedCluster) == null) { mapClustersToTracks.put(matchedCluster, new Vector<Track>()); }
+// 	    mapClustersToTracks.get(matchedCluster).add(tr);
+// 	}
+// 	for (Cluster clus : mapClustersToTracks.keySet()) {
+// 	    List<Track> matchedTracks = mapClustersToTracks.get(clus);
+// 	    if (matchedTracks.size()>1) {
+// 		// More than one track matched => fail
+// 		return false;
+// 	    }
+// 	}
+// 	// No instance of >1 track matched => OK
+// 	return true;
+//     }
+
+    // Utilities to find subclusters
+    protected List<Cluster> findSegmentsInList(Cluster clus, List<Cluster> listToScan) {
+	List<Cluster> flatListOfSubClusters = recursivelyFindSubClusters(clus);
+        List<Cluster> outputList = new Vector<Cluster>();
+        for (Cluster clusterFromListToScan : listToScan) {
+            if (flatListOfSubClusters.contains(clusterFromListToScan)) {
+	        outputList.add(clusterFromListToScan);
+	    }
+	}
+        return outputList;
+    }
+    protected List<Cluster> recursivelyFindSubClusters(Cluster clus) {
+	List<Cluster> output = new Vector<Cluster>();
+	for (Cluster dau : clus.getClusters()) {
+	    output.addAll(recursivelyFindSubClusters(dau));
+	}
+	output.add(clus);
+	return output;
+    }
+
+    // Copied from org.lcsim.recon.cluster.structural.Link
+    private class Link {
+	public Link(Cluster clus1, Cluster clus2) {
+	    c1 = clus1;
+	    c2 = clus2;
+	}
+	public boolean contains(Cluster clus) {
+	    return (clus==c1 || clus==c2);
+	}
+	public Cluster counterpart(Cluster clus) {
+	    if (clus==c1) {
+		return c2;
+	    } else if (clus==c2) {
+		return c1;
+	    } else {
+		return null;
+	    }
+	}
+	protected Cluster c1 = null;
+	protected Cluster c2 = null;
+    }
+
+    // Kind of hackish
+    Track findMultiplyAssignedTrack(LocalHelixExtrapolationTrackClusterMatcher extrap, List<Cluster> clusters, List<Track> tracks) {
+	Map<Cluster, List<Track>> mapClustersToTracks = new HashMap<Cluster, List<Track>>();
+	for (Track tr : tracks) {
+	    if (tr == null) { throw new AssertionError("Null track!"); }
+	    Cluster matchedCluster = extrap.matchTrackToCluster(tr, clusters);
+	    if (mapClustersToTracks.get(matchedCluster) == null) { mapClustersToTracks.put(matchedCluster, new Vector<Track>()); }
+	    mapClustersToTracks.get(matchedCluster).add(tr);
+	}
+	for (Cluster clus : mapClustersToTracks.keySet()) {
+	    if (clus == null) { throw new AssertionError("Null cluster!"); }
+	    List<Track> matchedTracks = mapClustersToTracks.get(clus);
+	    if (matchedTracks.size()>1) {
+		// More than one track matched => fail
+		return matchedTracks.get(0);
+	    }
+	}
+	// No instance of >1 track matched => OK
+	return null;
+    }
+
+    public void setDebug(boolean debug) { m_debug = debug; }
+}
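
A note on the link-selection loop in process(): each candidate link is ranked by the normalized score (likelihood - minCut)/(maxCut - minCut), which makes links of different types comparable even though their thresholds differ. For example, a clump-clump link with likelihood 0.995 scores (0.995 - 0.99)/(1.0 - 0.99) = 0.5, the same as a track-clump link with likelihood 0.85, since (0.85 - 0.7)/(1.0 - 0.7) = 0.5. The loop greedily adds the best-scoring remaining link, vetoing any link whose addition would attach two tracks to the same merged skeleton, and stops once the best remaining candidate scores below 0.0, i.e. falls below its minimum cut.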
CVSspam 0.2.8