Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
  ClusterSharingAlgorithm.java           +9        added at 1.1
  DTreeClusterSharingAlgorithm.java      +82       added at 1.1
  ProximityClusterSharingAlgorithm.java  +55       added at 1.1
  SharedCluster.java                     +42       added at 1.1
  SharedClusterGroup.java                +55       added at 1.1
  ReclusterDTreeDriver.java              +1 -117   1.25 -> 1.26
  ReclusterDriver.java                   +1 -142   1.23 -> 1.24

  Total: +245 -259 (5 added + 2 modified, 7 files)
MJC: (contrib) Refactor + minor fixes to PFA

lcsim/src/org/lcsim/contrib/uiowa
ClusterSharingAlgorithm.java added at 1.1
diff -N ClusterSharingAlgorithm.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ClusterSharingAlgorithm.java	2 Jul 2008 20:49:42 -0000	1.1
@@ -0,0 +1,9 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.Map;
+import java.util.List;
+import org.lcsim.event.Cluster;
+
+public interface ClusterSharingAlgorithm {
+    public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets);
+}

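For illustration (not part of this commit): an implementation returns a map from each target cluster to a non-negative raw weight, and simply omits targets that should receive no share. A minimal hypothetical implementation that shares equally among all targets might look like this:

    package org.lcsim.contrib.uiowa;

    import java.util.*;
    import org.lcsim.event.Cluster;

    // Hypothetical example only -- not in the repository.
    public class EqualShareAlgorithm implements ClusterSharingAlgorithm {
        public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
            Map<Cluster,Double> outputMap = new HashMap<Cluster,Double>();
            if (clusterTargets.isEmpty()) { return outputMap; } // nothing to share with
            double weight = 1.0 / clusterTargets.size();        // equal raw weight per target
            for (Cluster target : clusterTargets) {
                outputMap.put(target, weight);
            }
            return outputMap;
        }
    }
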
lcsim/src/org/lcsim/contrib/uiowa
DTreeClusterSharingAlgorithm.java added at 1.1
diff -N DTreeClusterSharingAlgorithm.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ DTreeClusterSharingAlgorithm.java	2 Jul 2008 20:49:42 -0000	1.1
@@ -0,0 +1,82 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.event.*;
+
+// In this weighting scheme
+//   * Components not in the same DTree cluster get a significant penalty factor
+//   * Components in the same DTree cluster get a small additive bonus (to lift the minimum weight above zero)
+// Watch out for the notation:
+//   * "components" are the shared hits. These are members of one and only one tree.
+//   * "targets" are the mips/clumps/etc. These may be members of more than one tree.
+public class DTreeClusterSharingAlgorithm implements ClusterSharingAlgorithm 
+{
+    double m_minimumDistance;
+    double m_maximumDistance;
+    Map<Cluster,Cluster> m_treeOfSharedCluster;
+    Map<Cluster,List<Cluster>> m_targetsInTree;
+
+    // Constructor supplies a list of DTree clusters
+    public DTreeClusterSharingAlgorithm(Map<Cluster,Cluster> treeOfSharedCluster, Map<Cluster,List<Cluster>> targetsInTree, double minimumDistance, double maximumDistance) {
+	m_treeOfSharedCluster = treeOfSharedCluster;
+	m_targetsInTree = targetsInTree;
+	m_minimumDistance = minimumDistance;
+	m_maximumDistance = maximumDistance;
+    }
+
+    // Interface
+    public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
+	Cluster parentDTreeCluster = m_treeOfSharedCluster.get(clusterToShare);
+	if (parentDTreeCluster == null) { throw new AssertionError("ERROR: DTreeClusterSharingAlgorithm was passed a cluster that's not part of a DTree."); }
+	Map<Cluster,Double> outputMap = new HashMap<Cluster,Double>();
+	for (Cluster target : clusterTargets) {
+	    double distance = proximity(clusterToShare, target);
+	    if (distance == 0.0) { throw new AssertionError("ERROR: Distance is zero... configuration error"); }
+	    double scaledDistance = distance / m_minimumDistance;
+	    double weight = 1.0 / (scaledDistance*scaledDistance*scaledDistance);
+	    if (weight > 1.0) { 
+		// Don't go above 1 based on proximity
+		weight = 1.0; 
+	    }
+	    // Now, is the target part of the same DTreeCluster?
+	    // This may return null if the target is something that doesn't come from
+	    // a DTreeCluster, e.g. a photon
+	    List<Cluster> targetsInTree = m_targetsInTree.get(parentDTreeCluster);
+	    if (targetsInTree != null && targetsInTree.contains(target)) {
+		// From same DTreeCluster => apply additive bonus and don't impose a distance cutoff
+		weight += 0.05;
+		outputMap.put(target, new Double(weight));
+	    } else {
+		// Not from same DTreeCluster => apply penalty factor and impose a distance cutoff
+		if (distance < m_maximumDistance) {
+		    weight *= 0.2;
+		    outputMap.put(target, new Double(weight));
+		} else {
+		    // doesn't appear in output at all
+		}
+	    }
+	}
+	return outputMap;
+    }
+
+    protected double proximity(Cluster clus1, Cluster clus2) {
+	if (clus1.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
+	if (clus2.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
+	double minDist = 0;
+	boolean found = false;
+	for (CalorimeterHit hit1 : clus1.getCalorimeterHits()) {
+	    Hep3Vector hitPosition1 = new BasicHep3Vector(hit1.getPosition());
+	    for (CalorimeterHit hit2 : clus2.getCalorimeterHits()) {
+		if (hit1 == hit2) { throw new AssertionError("Hits overlap!"); }
+		Hep3Vector hitPosition2 = new BasicHep3Vector(hit2.getPosition());
+		double distance = VecOp.sub(hitPosition1,hitPosition2).magnitude();
+		if (distance<minDist || found==false) {
+		    found = true;
+		    minDist = distance;
+		}
+	    }
+	}
+	return minDist;
+    }
+}

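A reader's sketch of the arithmetic (the distances below are invented, not taken from the commit): the base score is a 1/r^3 falloff clamped at 1, and tree membership then either adds 0.05 (same tree, no distance cutoff) or multiplies by 0.2 and applies the cutoff (different tree):

    // Hypothetical values: m_minimumDistance = 10.0, m_maximumDistance = 100.0, distance = 20.0
    double scaledDistance = 20.0 / 10.0;                              // 2.0
    double weight = Math.min(1.0, 1.0 / Math.pow(scaledDistance, 3)); // 1/8 = 0.125
    double sameTreeWeight  = weight + 0.05;                           // 0.175 (bonus, no cutoff)
    double otherTreeWeight = (20.0 < 100.0) ? 0.2 * weight : 0.0;     // 0.025 (penalty + cutoff)
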
lcsim/src/org/lcsim/contrib/uiowa
ProximityClusterSharingAlgorithm.java added at 1.1
diff -N ProximityClusterSharingAlgorithm.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ProximityClusterSharingAlgorithm.java	2 Jul 2008 20:49:42 -0000	1.1
@@ -0,0 +1,55 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.event.*;
+
+public class ProximityClusterSharingAlgorithm implements ClusterSharingAlgorithm {
+    // Drops off as 1/r^3
+    double m_minimumDistance; // Below this, score is always 1.0
+    double m_maximumDistance; // Above this, score is always 0.0
+    public ProximityClusterSharingAlgorithm(double minimumDistance, double maximumDistance) {
+	if (minimumDistance >= maximumDistance) { throw new AssertionError("Min dist must be < max dist"); }
+	if (minimumDistance <= 0.0) { throw new AssertionError("Min dist must be > 0"); }
+	m_minimumDistance = minimumDistance;
+	m_maximumDistance = maximumDistance;
+    }
+    public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
+	Map<Cluster,Double> outputMap = new HashMap<Cluster,Double>();
+	for (Cluster target : clusterTargets) {
+	    double distance = proximity(clusterToShare, target);
+	    if (distance < m_maximumDistance) {
+		if (distance == 0.0) { throw new AssertionError("ERROR: Distance is zero... configuration error"); }
+		double scaledDistance = distance / m_minimumDistance;
+		double weight = 1.0 / (scaledDistance*scaledDistance*scaledDistance);
+		if (weight > 1.0) { 
+		    // Don't go above 1
+		    weight = 1.0; 
+		}
+		outputMap.put(target, new Double(weight));
+		
+	    }
+	}
+	return outputMap;
+    }
+
+    protected double proximity(Cluster clus1, Cluster clus2) {
+	if (clus1.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
+	if (clus2.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
+	double minDist = 0;
+	boolean found = false;
+	for (CalorimeterHit hit1 : clus1.getCalorimeterHits()) {
+	    Hep3Vector hitPosition1 = new BasicHep3Vector(hit1.getPosition());
+	    for (CalorimeterHit hit2 : clus2.getCalorimeterHits()) {
+		if (hit1 == hit2) { throw new AssertionError("Hits overlap!"); }
+		Hep3Vector hitPosition2 = new BasicHep3Vector(hit2.getPosition());
+		double distance = VecOp.sub(hitPosition1,hitPosition2).magnitude();
+		if (distance<minDist || found==false) {
+		    found = true;
+		    minDist = distance;
+		}
+	    }
+	}
+	return minDist;
+    }
+}

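This is the pure-proximity weighting previously embedded in ReclusterDriver (see the removal below). A usage sketch, where the cluster variables and distance values are placeholders:

    // Hypothetical usage; someCluster and someTargets are placeholders.
    ClusterSharingAlgorithm alg = new ProximityClusterSharingAlgorithm(10.0, 100.0);
    Map<Cluster,Double> shares = alg.shareCluster(someCluster, someTargets);
    // Each weight is in (0, 1]; targets beyond the maximum distance are absent from the map.
    for (Map.Entry<Cluster,Double> entry : shares.entrySet()) {
        System.out.println("weight for target = " + entry.getValue());
    }
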
lcsim/src/org/lcsim/contrib/uiowa
SharedCluster.java added at 1.1
diff -N SharedCluster.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SharedCluster.java	2 Jul 2008 20:49:42 -0000	1.1
@@ -0,0 +1,42 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import org.lcsim.event.*;
+
+public class SharedCluster {
+    Cluster m_rawCluster = null;
+    Map<Cluster, Double> m_sharedBetween = null;
+    double m_sumOfWeights = 0.0;
+    public SharedCluster(Cluster rawCluster) {
+	m_rawCluster = rawCluster;
+	m_sumOfWeights = 0.0;
+	m_sharedBetween = new HashMap<Cluster,Double>();
+    }
+    public void addShare(Cluster target, double weight) {
+	m_sharedBetween.put(target, new Double(weight));
+	m_sumOfWeights += weight;
+    }
+    public Cluster getCluster() { return m_rawCluster; }
+    public Set<Cluster> getTargetClusters() {
+	return m_sharedBetween.keySet();
+    }
+    public Double getRawWeight(Cluster target) {
+	Double weight = m_sharedBetween.get(target);
+	if (weight != null) {
+	    if (weight <= 0.0) { throw new AssertionError("ERROR: Invalid weight = "+weight); }
+	    if (weight > m_sumOfWeights) { throw new AssertionError("ERROR: Weight = "+weight+" > sum of weights = "+m_sumOfWeights); }
+	}
+	return m_sharedBetween.get(target);
+    }
+    public Double getNormalizedWeight(Cluster target) {
+	Double rawWeight = getRawWeight(target);
+	if (rawWeight == null) {
+	    return rawWeight;
+	} else {
+	    if (rawWeight.isNaN()) { throw new AssertionError("ERROR: Raw weight is NaN"); }
+	    Double normalizedWeight = new Double(rawWeight.doubleValue()/m_sumOfWeights);
+	    if (normalizedWeight.isNaN()) { throw new AssertionError("ERROR: Normalized weight is NaN. Raw weight is "+rawWeight+" and sum of weights is "+m_sumOfWeights); }
+	    return normalizedWeight;
+	}
+    }
+}

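A sketch of the normalization (targets and weights invented): addShare() accumulates raw weights into m_sumOfWeights, and getNormalizedWeight() divides by that sum, so the normalized weights over a cluster's targets always sum to 1.

    // Hypothetical example; rawCluster, targetA, targetB are placeholders.
    SharedCluster shared = new SharedCluster(rawCluster);
    shared.addShare(targetA, 0.4);
    shared.addShare(targetB, 0.1);
    Double wA = shared.getNormalizedWeight(targetA); // 0.4 / 0.5 = 0.8
    Double wB = shared.getNormalizedWeight(targetB); // 0.1 / 0.5 = 0.2
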
lcsim/src/org/lcsim/contrib/uiowa
SharedClusterGroup.java added at 1.1
diff -N SharedClusterGroup.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SharedClusterGroup.java	2 Jul 2008 20:49:42 -0000	1.1
@@ -0,0 +1,55 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*;
+import org.lcsim.event.*;
+
+public class SharedClusterGroup {
+    List<SharedCluster> m_sharedClusters;
+    Map<Cluster, List<SharedCluster>> m_hints;
+    ClusterSharingAlgorithm m_algorithm = null;
+    // Setup
+    public SharedClusterGroup(List<Cluster> clustersToShare, ClusterSharingAlgorithm algorithm) {
+	// Don't initialize hints map -- that gets created when we do rebuildHints()
+	m_sharedClusters = new Vector<SharedCluster>();
+	m_algorithm = algorithm;
+	for (Cluster clusterToShare : clustersToShare) {
+	    m_sharedClusters.add(new SharedCluster(clusterToShare));
+	}
+    }
+    // Create shares based on algorithm
+    public void createShares(List<Cluster> clusterTargets) {
+	for (SharedCluster clusterToShare : m_sharedClusters) {
+	    Map<Cluster,Double> shares = m_algorithm.shareCluster(clusterToShare.getCluster(), clusterTargets);
+	    for (Cluster target : shares.keySet()) {
+		clusterToShare.addShare(target, shares.get(target));
+	    }
+	}
+    }
+    // Rebuild hints table (used for lookups)
+    public void rebuildHints() {
+	m_hints = new HashMap<Cluster, List<SharedCluster>>();
+	for (SharedCluster share : m_sharedClusters) {
+	    Set<Cluster> targets = share.getTargetClusters();
+	    for (Cluster target : targets) {
+		List<SharedCluster> matchedShares = m_hints.get(target);
+		if (matchedShares == null) {
+		    matchedShares = new Vector<SharedCluster>();
+		    m_hints.put(target, matchedShares);
+		}
+		matchedShares.add(share);
+	    }
+	}
+    }
+    // Look up shares corresponding to a target cluster, relying on cache.
+    public List<SharedCluster> findContributingSharedClusters(Cluster target) {
+	return m_hints.get(target);
+    }
+    // List target clusters with contributions:
+    public Set<Cluster> findTargets() {
+	return m_hints.keySet();
+    }
+    // List all shares
+    public List<SharedCluster> listAllSharedClusters() {
+	return m_sharedClusters;
+    }
+}

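The expected call sequence, sketched with placeholder inputs: rebuildHints() must run after createShares() and before any lookup, since the hints map is only created there.

    // Hypothetical usage; clustersToShare and clusterTargets are placeholders.
    SharedClusterGroup group = new SharedClusterGroup(
            clustersToShare, new ProximityClusterSharingAlgorithm(10.0, 100.0));
    group.createShares(clusterTargets); // compute raw weights for every shared cluster
    group.rebuildHints();               // build the target -> contributing-shares index
    for (Cluster target : group.findTargets()) {
        List<SharedCluster> contributors = group.findContributingSharedClusters(target);
        // ... combine the contributors' normalized weights for this target ...
    }
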
lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.25 -> 1.26
diff -u -r1.25 -r1.26
--- ReclusterDTreeDriver.java	27 Jun 2008 17:00:25 -0000	1.25
+++ ReclusterDTreeDriver.java	2 Jul 2008 20:49:42 -0000	1.26
@@ -34,7 +34,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.25 2008/06/27 17:00:25 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.26 2008/07/02 20:49:42 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -469,68 +469,6 @@
 	allSharedClusters.add(sharedLeftoverHitClustersECAL);
 	allSharedClusters.add(sharedLeftoverHitClustersHCAL);
 
-	// DEBUG
-	if (false) {	    
-	    for (Cluster dTreeClus : dTreeClusters) {
-		List<Cluster> subClustersECAL = new Vector<Cluster>();
-		for (Cluster subClus : leftoverHitClustersToShareECAL) {
-		    if (treeOfSharedCluster.get(subClus) == dTreeClus) {
-			subClustersECAL.add(subClus);
-		    }
-		}
-		List<Cluster> subClustersHCAL = new Vector<Cluster>();
-		for (Cluster subClus : leftoverHitClustersToShareHCAL) {
-		    if (treeOfSharedCluster.get(subClus) == dTreeClus) {
-			subClustersHCAL.add(subClus);
-		    }
-		}
-		System.out.println("DTree cluster with "+dTreeClus.getCalorimeterHits().size()+" hits contains "+subClustersECAL.size()+" shared sub-clusters in the ECAL and "+subClustersHCAL.size()+" shared sub-clusters in the HCAL");
-		List<Cluster> targetsInThisTree = targetsInTree.get(dTreeClus);
-		if (targetsInThisTree.size()==0) {
-		    // No targets -- not interesting here
-		    continue;
-		}
-		SharedClusterGroup tmpSharedHits;
-		if (subClustersECAL.size()==0 && subClustersHCAL.size()==0) {
-		    // No hits
-		    continue;
-		} else if (subClustersECAL.size()==0 && subClustersHCAL.size()> 0) {
-		    tmpSharedHits = new SharedClusterGroup(subClustersHCAL, dTreeSharingAlgHCAL);
-		} else if (subClustersECAL.size()> 0 && subClustersHCAL.size()==0) {
-		    tmpSharedHits = new SharedClusterGroup(subClustersECAL, dTreeSharingAlgECAL);
-		} else {
-		    throw new AssertionError("Mixed cluster");
-		}
-
-		tmpSharedHits.createShares(linkableClusters);
-		tmpSharedHits.rebuildHints();
-		Set<Cluster> targets = tmpSharedHits.findTargets();
-		for (Cluster target : targetsInThisTree) {
-		    List<SharedCluster> contrib = null;
-		    if (targets.contains(target)) {
-			contrib = tmpSharedHits.findContributingSharedClusters(target);
-		    }
-		    MCParticle domPartOfTarget = quoteDominantParticle(target);
-		    double sumWeights = 0.0;
-		    double sumWeightsTM = 0.0;
-		    if (contrib != null) {
-			for (SharedCluster subClus : contrib) {
-			    MCParticle domPartOfSubClus = quoteDominantParticle(subClus.getCluster());
-			    Double weight = subClus.getNormalizedWeight(target);
-			    if (weight != null) { 
-				sumWeights += weight; 
-				double distance = proximity(subClus.getCluster(), target);
-				double rawWeight = subClus.getRawWeight(target);
-				if (domPartOfTarget == domPartOfSubClus) { sumWeightsTM += weight; }
-				System.out.println("DEBUG:   PROX="+distance+" => RAW WEIGHT "+rawWeight+" (truth="+domPartOfSubClus.getPDGID()+" with p="+domPartOfSubClus.getMomentum().magnitude()+")");
-			    }
-			}
-		    }
-		    System.out.println("    -> Target with "+target.getCalorimeterHits().size()+" has "+sumWeights+" weight of shared hits of which "+sumWeightsTM+" truth-matched (truth="+domPartOfTarget.getPDGID()+" with p="+domPartOfTarget.getMomentum().magnitude()+")");
-		}
-	    }
-	}
-
 	// Iterate to build clusters:
 	for (int iIter=0; iIter<10; iIter++) {
 	    newMapShowerComponentToTrack = new HashMap<Cluster, Track>();
@@ -852,60 +790,6 @@
 	return output;
     }
 
-
-    // In this weighting scheme
-    //   * Components not in the same DTree cluster get a significant penalty factor
-    //   * Components in the same DTree cluster get a small additive bonus (to take the minimum above zero)
-    // Watch out for the notation:
-    //   * "components" are the shared hits. These are members of one and only one tree.
-    //   * "targets" are the mips/clumps/etc. These may be members of more than one tree.
-    protected class DTreeClusterSharingAlgorithm implements ClusterSharingAlgorithm {
-	double m_minimumDistance;
-	double m_maximumDistance;
-	Map<Cluster,Cluster> m_treeOfSharedCluster;
-	Map<Cluster,List<Cluster>> m_targetsInTree;
-	// Constructor supplies a list of DTree clusters
-	public DTreeClusterSharingAlgorithm(Map<Cluster,Cluster> treeOfSharedCluster, Map<Cluster,List<Cluster>> targetsInTree, double minimumDistance, double maximumDistance) {
-	    m_treeOfSharedCluster = treeOfSharedCluster;
-	    m_targetsInTree = targetsInTree;
-	    m_minimumDistance = minimumDistance;
-	    m_maximumDistance = maximumDistance;
-	}
-	public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
-	    Cluster parentDTreeCluster = m_treeOfSharedCluster.get(clusterToShare);
-	    if (parentDTreeCluster == null) { throw new AssertionError("ERROR: DTreeClusterSharingAlgorithm was passed a cluster that's not part of a DTree."); }
-	     Map<Cluster,Double> outputMap = new HashMap<Cluster,Double>();
-	     for (Cluster target : clusterTargets) {
-		 double distance = proximity(clusterToShare, target);
-		 if (distance == 0.0) { throw new AssertionError("ERROR: Distance is zero... configuration error"); }
-		 double scaledDistance = distance / m_minimumDistance;
-		 double weight = 1.0 / (scaledDistance*scaledDistance*scaledDistance);
-		 if (weight > 1.0) { 
-		     // Don't go above 1 based on proximity
-		     weight = 1.0; 
-		 }
-		 // Now, is the target part of the same DTreeCluster?
-		 // This may return null if the target is something that doesn't come from
-		 // a DTreeCluster, e.g. a photon
-		 List<Cluster> targetsInTree = m_targetsInTree.get(parentDTreeCluster);
-		 if (targetsInTree != null && targetsInTree.contains(target)) {
-		     // From same DTreeCluster => apply additive bonus and don't impose a distance cutoff
-		     weight += 0.05;
-		     outputMap.put(target, new Double(weight));
-		 } else {
-		     // Not from same DTreeCluster => apply penalty factor and impose a distance cutoff
-		     if (distance < m_maximumDistance) {
-			 weight *= 0.2;
-			 outputMap.put(target, new Double(weight));
-		     } else {
-			 // doesn't appear in output at all
-		     }
-		 }
-	     }
-	     return outputMap;
-	}
-    }
-
     protected void debugPrintTrackInfo(List<Track> trackList, List<Track> unmatchedTracks, Map<Track,Cluster> tracksMatchedToClusters, Set<Track> uniquelyMatchedTracks, Set<Track> ambiguouslyMatchedTracks, List<Track> tweakedTracks, Set<Cluster> seeds, List<Track> tracksSortedByMomentum, Map<Track, Cluster> tweakedTracksMatchedToClusters) {
 	System.out.println("There were "+trackList.size()+" tracks in the event. Of these, "+unmatchedTracks.size()+" were unmatched and "+tracksMatchedToClusters.size()+" were matched. Of the track matches, "+uniquelyMatchedTracks.size()+" were unique and "+ambiguouslyMatchedTracks.size()+" were ambiguous. After tweaking, there were "+tweakedTracks.size()+" tracks. The event contains "+seeds.size()+" seeds.");
 	System.out.println("Here are the "+unmatchedTracks.size()+" unmatched tracks:");

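The inner DTreeClusterSharingAlgorithm class removed in the hunk above is the one extracted to its own file earlier in this commit; presumably the driver now constructs the top-level class instead, along these lines (the distance values here are invented):

    // Assumed post-refactor construction; treeOfSharedCluster and targetsInTree
    // appear in the driver, but the distance arguments are placeholders.
    ClusterSharingAlgorithm dTreeSharingAlgHCAL =
        new DTreeClusterSharingAlgorithm(treeOfSharedCluster, targetsInTree, 10.0, 100.0);
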
lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.23 -> 1.24
diff -u -r1.23 -r1.24
--- ReclusterDriver.java	17 Jun 2008 16:43:32 -0000	1.23
+++ ReclusterDriver.java	2 Jul 2008 20:49:42 -0000	1.24
@@ -37,7 +37,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.23 2008/06/17 16:43:32 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.24 2008/07/02 20:49:42 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -2098,147 +2098,6 @@
 
     ///////////////////////////////////////
 
-    protected interface ClusterSharingAlgorithm {
-	public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets);
-    }
-    protected class SharedClusterGroup {
-	List<SharedCluster> m_sharedClusters;
-	Map<Cluster, List<SharedCluster>> m_hints;
-	ClusterSharingAlgorithm m_algorithm = null;
-	// Setup
-	public SharedClusterGroup(List<Cluster> clustersToShare, ClusterSharingAlgorithm algorithm) {
-	    // Don't initialize hints map -- that gets created when we do rebuildHints()
-	    m_sharedClusters = new Vector<SharedCluster>();
-	    m_algorithm = algorithm;
-	    for (Cluster clusterToShare : clustersToShare) {
-		m_sharedClusters.add(new SharedCluster(clusterToShare));
-	    }
-	}
-	// Create shares based on algorithm
-	public void createShares(List<Cluster> clusterTargets) {
-	    for (SharedCluster clusterToShare : m_sharedClusters) {
-		Map<Cluster,Double> shares = m_algorithm.shareCluster(clusterToShare.getCluster(), clusterTargets);
-		for (Cluster target : shares.keySet()) {
-		    clusterToShare.addShare(target, shares.get(target));
-		}
-	    }
-	}
-	// Rebuild hints table (used for lookups)
-	public void rebuildHints() {
-	    m_hints = new HashMap<Cluster, List<SharedCluster>>();
-	    for (SharedCluster share : m_sharedClusters) {
-		Set<Cluster> targets = share.getTargetClusters();
-		for (Cluster target : targets) {
-		    List<SharedCluster> matchedShares = m_hints.get(target);
-		    if (matchedShares == null) {
-			matchedShares = new Vector<SharedCluster>();
-			m_hints.put(target, matchedShares);
-		    }
-		    matchedShares.add(share);
-		}
-	    }
-	}
-	// Look up shares corresponding to a target cluster, relying on cache.
-	public List<SharedCluster> findContributingSharedClusters(Cluster target) {
-	    return m_hints.get(target);
-	}
-	// List target clusters with contributions:
-	public Set<Cluster> findTargets() {
-	    return m_hints.keySet();
-	}
-	// List all shares
-	public List<SharedCluster> listAllSharedClusters() {
-	    return m_sharedClusters;
-	}
-    }
-    protected class SharedCluster {
-	Cluster m_rawCluster = null;
-	Map<Cluster, Double> m_sharedBetween = null;
-	double m_sumOfWeights = 0.0;
-	public SharedCluster(Cluster rawCluster) {
-	    m_rawCluster = rawCluster;
-	    m_sumOfWeights = 0.0;
-	    m_sharedBetween = new HashMap<Cluster,Double>();
-	}
-	public void addShare(Cluster target, double weight) {
-	    m_sharedBetween.put(target, new Double(weight));
-	    m_sumOfWeights += weight;
-	}
-	public Cluster getCluster() { return m_rawCluster; }
-	public Set<Cluster> getTargetClusters() {
-	    return m_sharedBetween.keySet();
-	}
-	public Double getRawWeight(Cluster target) {
-	    Double weight = m_sharedBetween.get(target);
-	    if (weight != null) {
-		if (weight <= 0.0) { throw new AssertionError("ERROR: Invalid weight = "+weight); }
-		if (weight > m_sumOfWeights) { throw new AssertionError("ERROR: Weight = "+weight+" > sum of weights = "+m_sumOfWeights); }
-	    }
-	    return m_sharedBetween.get(target);
-	}
-	public Double getNormalizedWeight(Cluster target) {
-	    Double rawWeight = getRawWeight(target);
-	    if (rawWeight == null) {
-		return rawWeight;
-	    } else {
-		if (rawWeight.isNaN()) { throw new AssertionError("ERROR: Raw weight is NaN"); }
-		Double normalizedWeight = new Double(rawWeight.doubleValue()/m_sumOfWeights);
-		if (normalizedWeight.isNaN()) { throw new AssertionError("ERROR: Normalized weight is NaN. Raw weight is "+rawWeight+" and sum of weights is "+m_sumOfWeights); }
-		return normalizedWeight;
-	    }
-	}
-    }
-    protected class ProximityClusterSharingAlgorithm implements ClusterSharingAlgorithm {
-	// Drops off as 1/r^3
-	double m_minimumDistance; // Below this, score is always 1.0
-	double m_maximumDistance; // Above this, score is always 0.0
-	public ProximityClusterSharingAlgorithm(double minimumDistance, double maximumDistance) {
-	    if (minimumDistance >= maximumDistance) { throw new AssertionError("Min dist must be < max dist"); }
-	    if (minimumDistance <= 0.0) { throw new AssertionError("Min dist must be > 0"); }
-	    m_minimumDistance = minimumDistance;
-	    m_maximumDistance = maximumDistance;
-	}
-	public Map<Cluster,Double> shareCluster(Cluster clusterToShare, List<Cluster> clusterTargets) {
-	    Map<Cluster,Double> outputMap = new HashMap<Cluster,Double>();
-	    for (Cluster target : clusterTargets) {
-		double distance = proximity(clusterToShare, target);
-		if (distance < m_maximumDistance) {
-		    if (distance == 0.0) { throw new AssertionError("ERROR: Distance is zero... configuration error"); }
-		    double scaledDistance = distance / m_minimumDistance;
-		    double weight = 1.0 / (scaledDistance*scaledDistance*scaledDistance);
-		    if (weight > 1.0) { 
-			// Don't go above 1
-			weight = 1.0; 
-		    }
-		    outputMap.put(target, new Double(weight));
-		    
-		    //System.out.println("DEBUG: Created a mapping from clusterToShare with "+clusterToShare.getCalorimeterHits().size()+" hits to target with "+target.getCalorimeterHits().size()+" hits -- with distance="+distance+", minDist="+m_minimumDistance+" => weight = "+weight);
-		}
-	    }
-	    //System.out.println("DEBUG: ProximityClusterSharingAlgorithm.shareCluster() running with "+clusterTargets.size()+" targets... created output map with "+outputMap.size()+" entries.");
-		return outputMap;
-	}
-	protected double proximity(Cluster clus1, Cluster clus2) {
-	    if (clus1.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
-	    if (clus2.getCalorimeterHits().size()<1) { throw new AssertionError("Empty cluster"); }
-	    double minDist = 0;
-	    boolean found = false;
-	    for (CalorimeterHit hit1 : clus1.getCalorimeterHits()) {
-		Hep3Vector hitPosition1 = new BasicHep3Vector(hit1.getPosition());
-		for (CalorimeterHit hit2 : clus2.getCalorimeterHits()) {
-		    if (hit1 == hit2) { throw new AssertionError("Hits overlap!"); }
-		    Hep3Vector hitPosition2 = new BasicHep3Vector(hit2.getPosition());
-		    double distance = VecOp.sub(hitPosition1,hitPosition2).magnitude();
-		    if (distance<minDist || found==false) {
-			found = true;
-			minDist = distance;
-		    }
-		}
-	    }
-	    return minDist;
-	}
-    }
-
     private double scoreOnProximityAndPointing(Cluster mip, Cluster clus, double thresholdForProximity) {
 	double likelihood = m_eval.getLinkLikelihoodTrackToClump(mip,clus);
 	double score = likelihood;
CVSspam 0.2.8