Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDTreeDriver.java   +92 -22   1.26 -> 1.27
MJC: (contrib) Use cone-based linking within main reconstruction pass of PFA. Also sketched out how to use cone algorithms for shared hits, but didn't enable them yet.
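
Note: the body of the new initPotentialLinks_Cone call is not part of this diff; only its arguments and the two cuts (0.95, 0.9) are visible below. Purely as an illustration of the idea, linking a candidate cluster to a track-seeded cluster when it lies inside a cone around the track direction, a sketch in plain Java could look like the following. The centroid helper and the reading of the two cuts as tight/loose cosine thresholds on the opening angle are assumptions, not taken from the commit.

    // Illustrative sketch only (assumed helper, not the committed initPotentialLinks_Cone).
    // Links a candidate cluster to a track-seeded cluster when the candidate's centroid
    // falls inside a cone around the track direction, with the seed centroid as the apex.
    import hep.physics.vec.BasicHep3Vector;
    import hep.physics.vec.Hep3Vector;
    import hep.physics.vec.VecOp;
    import org.lcsim.event.CalorimeterHit;
    import org.lcsim.event.Cluster;
    import org.lcsim.event.Track;

    class ConeLinkSketch {
        // Energy-weighted centroid of a cluster's hits.
        static Hep3Vector centroid(Cluster clus) {
            double[] sum = new double[3];
            double eTot = 0.0;
            for (CalorimeterHit hit : clus.getCalorimeterHits()) {
                double e = hit.getCorrectedEnergy();
                double[] pos = hit.getPosition();
                for (int i = 0; i < 3; ++i) sum[i] += e * pos[i];
                eTot += e;
            }
            return new BasicHep3Vector(sum[0]/eTot, sum[1]/eTot, sum[2]/eTot);
        }

        // True if the angle between the track direction and the seed->candidate axis
        // satisfies cos(angle) > cosCut (e.g. 0.95 for a tight cone, 0.9 for a loose one).
        static boolean inCone(Cluster seed, Track track, Cluster candidate, double cosCut) {
            Hep3Vector axis = VecOp.unit(new BasicHep3Vector(track.getMomentum()));
            Hep3Vector toCandidate = VecOp.unit(VecOp.sub(centroid(candidate), centroid(seed)));
            return VecOp.dot(axis, toCandidate) > cosCut;
        }
    }

In the committed code the cone test presumably runs over the [seeds] x [linkableClustersExcludingPhotons] pairs, using clustersMatchedToTweakedTracks to look up the track that defines each cone axis.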

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.26 -> 1.27
diff -u -r1.26 -r1.27
--- ReclusterDTreeDriver.java	2 Jul 2008 20:49:42 -0000	1.26
+++ ReclusterDTreeDriver.java	8 Jul 2008 16:35:34 -0000	1.27
@@ -34,7 +34,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.26 2008/07/02 20:49:42 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.27 2008/07/08 16:35:34 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -283,6 +283,7 @@
 	// Redo ambiguous tracks hackishly:
 	// Similarly, redo track -> cluster map to use tweaked tracks
 	Map<Track, Cluster> tweakedTracksMatchedToClusters = new HashMap<Track,Cluster>();
+	Map<Cluster, Track> clustersMatchedToTweakedTracks = new HashMap<Cluster, Track>();
 	List<Track> tweakedTracks = new Vector<Track>();
 	Map<Track, Track> mapOrigTrackToTweakedTrack = new HashMap<Track,Track>();
 	tweakedTracks.addAll(uniquelyMatchedTracks);
@@ -295,12 +296,14 @@
 	    for (Track origTrack : trackSet) {
 		Cluster clus = tracksMatchedToClusters.get(origTrack);
 		tweakedTracksMatchedToClusters.put(tweakedTrack, clus); // over-writes sometimes, but that's OK.
+		clustersMatchedToTweakedTracks.put(clus, tweakedTrack);
 		mapOrigTrackToTweakedTrack.put(origTrack, tweakedTrack);
 	    }
 	}
 	for (Track tr : uniquelyMatchedTracks) {
 	    Cluster clus = tracksMatchedToClusters.get(tr);
 	    tweakedTracksMatchedToClusters.put(tr, clus);
+	    clustersMatchedToTweakedTracks.put(clus, tr);
 	}
 
 	// Track seeds
@@ -355,19 +358,18 @@
 	}
 
 	// Prep for linking
-	List<Cluster> linkableClusters = new Vector<Cluster>();
+	List<Cluster> linkableClustersExcludingPhotons = new Vector<Cluster>();
 	List<Cluster> smallClustersToShare = new Vector<Cluster>();
 	List<Cluster> leftoverHitClustersToShare = new Vector<Cluster>();
 	List<Cluster> leftoverHitClustersToShareECAL = new Vector<Cluster>();
 	List<Cluster> leftoverHitClustersToShareHCAL = new Vector<Cluster>();
 	List<Cluster> leftoverHitClustersAllowedInShowers = new Vector<Cluster>();
-	linkableClusters.addAll(mips);
-	linkableClusters.addAll(modifiedPhotonClusters);
-	linkableClusters.addAll(clumps);
-	linkableClusters.addAll(treesWithNoStructure);
+	linkableClustersExcludingPhotons.addAll(mips);
+	linkableClustersExcludingPhotons.addAll(clumps);
+	linkableClustersExcludingPhotons.addAll(treesWithNoStructure);
 	for (Cluster clus : leftoverHitClusters) {
 	    if (seeds.contains(clus)) {
-		linkableClusters.add(clus);
+		linkableClustersExcludingPhotons.add(clus);
 	    } else if (clus.getCalorimeterHits().size() < 3) {
 		smallClustersToShare.add(clus);
 	    } else {
@@ -397,6 +399,9 @@
 	    }
 	}
 	smallClustersToShare.addAll(photonFragments);
+	List<Cluster> linkableClusters = new Vector<Cluster>();
+	linkableClusters.addAll(modifiedPhotonClusters);
+	linkableClusters.addAll(linkableClustersExcludingPhotons);
 
 	// Initially, cheat
 	resetPotentialLinks();
@@ -429,6 +434,9 @@
 	    initPotentialLinks_MiscMisc(seedLeftoverHitClusters, treesWithNoStructure, thresholdForProximity, "SmallSeed", "LargeStructurelessTree");
 
 	    initPotentialLinks_MiscSelf(treesWithNoStructure, thresholdForProximityClump, "LargeStructurelessTree", false);
+
+	    // TEST
+	    initPotentialLinks_Cone(seeds, linkableClustersExcludingPhotons, clustersMatchedToTweakedTracks, 0.95, 0.9);
 	}
 
 	// Done making links. Prep & build skeletons:
@@ -452,22 +460,84 @@
 	Map<Track, Set<Track>> mapTrackToJet = null;
 
 	// Set up sharing
+	boolean proximityShareForSmallClusters = true;
+	boolean dTreeShareForHaloECAL = true;
+	boolean dTreeShareForHaloHCAL = true;
+	boolean steveMipShareForHaloHCAL = false;
+	boolean taejeongMipShareForHaloHCAL = false;
 	List<SharedClusterGroup> allSharedClusters = new Vector<SharedClusterGroup>();
-	ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
-	SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAlgForSmallClusters);
-	sharedSmallDTreeClusters.createShares(linkableClusters);
-	sharedSmallDTreeClusters.rebuildHints();
-	allSharedClusters.add(sharedSmallDTreeClusters);
-	DTreeClusterSharingAlgorithm dTreeSharingAlgECAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, 150.0);
-	DTreeClusterSharingAlgorithm dTreeSharingAlgHCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, 200.0);
-	SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, dTreeSharingAlgECAL);
-	SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, dTreeSharingAlgHCAL);
-	sharedLeftoverHitClustersECAL.createShares(linkableClusters);
-	sharedLeftoverHitClustersECAL.rebuildHints();
-	sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
-	sharedLeftoverHitClustersHCAL.rebuildHints();
-	allSharedClusters.add(sharedLeftoverHitClustersECAL);
-	allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+	// Small clusters
+	if (proximityShareForSmallClusters) {
+	    ProximityClusterSharingAlgorithm proximityAlgForSmallClusters = new ProximityClusterSharingAlgorithm(40.0, 250.0);
+	    SharedClusterGroup sharedSmallDTreeClusters = new SharedClusterGroup(smallClustersToShare, proximityAlgForSmallClusters);
+	    sharedSmallDTreeClusters.createShares(linkableClusters);
+	    sharedSmallDTreeClusters.rebuildHints();
+	    allSharedClusters.add(sharedSmallDTreeClusters);
+	} else {
+	    throw new AssertionError("Unhandled case!");
+	}
+	// ECAL halo
+	if (dTreeShareForHaloECAL) {
+	    DTreeClusterSharingAlgorithm dTreeSharingAlgECAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 20.0, 150.0);
+	    SharedClusterGroup sharedLeftoverHitClustersECAL = new SharedClusterGroup(leftoverHitClustersToShareECAL, dTreeSharingAlgECAL);
+	    sharedLeftoverHitClustersECAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersECAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersECAL);
+	} else {
+	    throw new AssertionError("Unhandled case!");
+	}
+	// HCAL halo
+	if (dTreeShareForHaloHCAL) {
+	    DTreeClusterSharingAlgorithm dTreeSharingAlgHCAL = new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 50.0, 200.0);
+	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, dTreeSharingAlgHCAL);
+	    sharedLeftoverHitClustersHCAL.createShares(linkableClusters);
+	    sharedLeftoverHitClustersHCAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+	} else if (steveMipShareForHaloHCAL) {
+	    SteveMipWrapper tmpWrapper = new SteveMipWrapper();
+	    tmpWrapper.process(event);
+	    SteveMIPReassignmentAlgorithm tmpMipAlg = new SteveMIPReassignmentAlgorithm(event, 1.0);
+	    DownstreamTrackClusterSharingAlgorithm coneSharingAlgHCAL  = new DownstreamTrackClusterSharingAlgorithm(clustersMatchedToTweakedTracks, tmpMipAlg); // TEST
+	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneSharingAlgHCAL); // TEST
+	    List<Cluster> tmpSeedList = new Vector<Cluster>(); // TEST
+	    tmpSeedList.addAll(seeds); // TEST	
+	    sharedLeftoverHitClustersHCAL.createShares(tmpSeedList); // TEST
+	    sharedLeftoverHitClustersHCAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+	} else if (taejeongMipShareForHaloHCAL) {
+	    LocalHelixExtrapolator tmpFindCluster = new LocalHelixExtrapolator(); // TEST    
+	    tmpFindCluster.process(m_event); // TEST
+	    Map<Track, Set<Cluster>> tmpMap = new HashMap<Track, Set<Cluster>>(); // TEST
+	    for (Cluster clus : clustersMatchedToTweakedTracks.keySet()) {
+		Track tr = clustersMatchedToTweakedTracks.get(clus);
+		Set<Cluster> tmpSet = new HashSet<Cluster>();
+		tmpSet.add(clus);
+		tmpMap.put(tr, tmpSet);
+		System.out.println("Seed with "+clus.getCalorimeterHits().size()+" -> Track");
+	    }
+	    System.out.println("DEBUG: [seeds] contains "+seeds.size()+" clusters.");
+	    System.out.println("DEBUG: [clustersMatchedToTweakedTracks] contains "+clustersMatchedToTweakedTracks.keySet().size()+" clusters.");
+	    CachedMIPReassignmentAlgorithm tmpMipAlg = new CachedMIPReassignmentAlgorithm(1.0, tmpFindCluster, tmpMap, mips, allHits); // TEST
+	    DownstreamTrackClusterSharingAlgorithm coneSharingAlgHCAL  = new DownstreamTrackClusterSharingAlgorithm(clustersMatchedToTweakedTracks, tmpMipAlg); // TEST
+	    SharedClusterGroup sharedLeftoverHitClustersHCAL = new SharedClusterGroup(leftoverHitClustersToShareHCAL, coneSharingAlgHCAL); // TEST
+	    List<Cluster> tmpSeedList = new Vector<Cluster>(); // TEST
+	    tmpSeedList.addAll(seeds); // TEST	
+	    sharedLeftoverHitClustersHCAL.createShares(tmpSeedList); // TEST
+	    sharedLeftoverHitClustersHCAL.rebuildHints();
+	    allSharedClusters.add(sharedLeftoverHitClustersHCAL);
+	} else {
+	    throw new AssertionError("Unhandled case!");
+	}
+
+
+	for (Cluster clus : clustersMatchedToTweakedTracks.keySet()) {
+		Track tr = clustersMatchedToTweakedTracks.get(clus);
+		double rawEnergy = energy(clus);
+		double totEnergy = energy(clus, allSharedClusters);
+		double trackMom = new BasicHep3Vector(tr.getMomentum()).magnitude();
+		System.out.println("Seed cluster ["+clus.getCalorimeterHits().size()+"] -> Track [p="+trackMom+"] -> "
+				   +"raw energy "+rawEnergy+" -> total energy "+totEnergy);
+	}				   
 
 	// Iterate to build clusters:
 	for (int iIter=0; iIter<10; iIter++) {
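
The SteveMip and Taejeong branches above are the "sketched out but not enabled" cone sharing mentioned in the log message: both sit behind flags that are currently false, and both hand the clustersMatchedToTweakedTracks map plus a MIP reassignment algorithm to a DownstreamTrackClusterSharingAlgorithm. Purely as a hypothetical sketch of that kind of sharing decision (the helper below is illustrative and is not the actual DownstreamTrackClusterSharingAlgorithm API), a cone-based assignment of leftover HCAL clusters to track seeds could look like this, reusing the assumed centroid helper from the earlier sketch:

    // Hypothetical sketch only: attach each leftover HCAL cluster to the track-seeded
    // cluster whose track direction it lines up with best, if it is inside any cone at all.
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import hep.physics.vec.BasicHep3Vector;
    import hep.physics.vec.Hep3Vector;
    import hep.physics.vec.VecOp;
    import org.lcsim.event.Cluster;
    import org.lcsim.event.Track;

    class ConeShareSketch {
        // Returns a proposed halo-cluster -> seed-cluster assignment; clusters outside
        // every cone are left out of the map (and would fall back to other sharing).
        static Map<Cluster, Cluster> shareByCone(List<Cluster> haloClusters,
                                                 Map<Cluster, Track> seedToTrack,
                                                 double cosCut) {
            Map<Cluster, Cluster> shares = new HashMap<Cluster, Cluster>();
            for (Cluster halo : haloClusters) {
                Cluster bestSeed = null;
                double bestCos = cosCut;
                for (Map.Entry<Cluster, Track> entry : seedToTrack.entrySet()) {
                    Hep3Vector axis = VecOp.unit(new BasicHep3Vector(entry.getValue().getMomentum()));
                    Hep3Vector dir = VecOp.unit(VecOp.sub(ConeLinkSketch.centroid(halo),
                                                          ConeLinkSketch.centroid(entry.getKey())));
                    double cosAngle = VecOp.dot(axis, dir);
                    if (cosAngle > bestCos) {
                        bestCos = cosAngle;
                        bestSeed = entry.getKey();
                    }
                }
                if (bestSeed != null) shares.put(halo, bestSeed);
            }
            return shares;
        }
    }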
CVSspam 0.2.8