lcsim/src/org/lcsim/contrib/uiowa
diff -u -r1.1 -r1.2
--- ReclusterDriver.java 14 Oct 2007 22:08:10 -0000 1.1
+++ ReclusterDriver.java 15 Oct 2007 07:16:13 -0000 1.2
@@ -13,6 +13,8 @@
import org.lcsim.mc.fast.tracking.ReconTrack;
import org.lcsim.event.base.*;
import hep.physics.particle.Particle;
+import org.lcsim.recon.cluster.structural.likelihood.*;
+import org.lcsim.recon.cluster.structural.*;
public class ReclusterDriver extends Driver {
@@ -28,8 +30,10 @@
String m_inputClumps;
String m_inputSplitSkeletonClusters;
EventHeader m_event;
+ protected LikelihoodEvaluator m_eval = null;
- public ReclusterDriver(String mcList, String trackList, String muonParticles, String photonClusters, String skeletonClusters, String smallClusters, String unusedHits, String largeClusters, String mips, String clumps, String splitSkeletonClusters) {
+ public ReclusterDriver(String mcList, String trackList, String muonParticles, String photonClusters, String skeletonClusters, String smallClusters, String unusedHits, String largeClusters, String mips, String clumps, String splitSkeletonClusters, LikelihoodEvaluator eval) {
+ m_eval = eval;
m_mcList = mcList;
m_inputTrackList = trackList;
m_inputMuonParticles = muonParticles;
@@ -427,31 +431,34 @@
//
// To keep things somewhat bounded, need to impose distance cut-offs.
// Scoring normalization: 1.0 indicates a link that you'd definitely want to make. Cap at 1.0
+ //
+ // We also need to watch out for a "swamping" problem.
+ // Sometimes it makes sense to add in a subcluster even if it takes the total cluster energy
+ // over nominal. (Maybe that link is better and some of the current ones are bad.) But if the
+ // energy of the cluster to be added is obviously crazy (e.g. such that seed + new cluster alone
+ // would go out of bounds) then we should clearly not add it.
- iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds);
+ iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
printStatus("After third pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
- iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds);
+ iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
printStatus("After fourth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
- iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds);
+ iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
printStatus("After fifth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
- iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds);
+ iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
printStatus("After sixth pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
- /*
- for (Cluster seed : seeds) {
- List<Cluster> wholeCluster = new Vector<Cluster>();
- wholeCluster.add(seed);
+ adjustThresholds(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, seeds, seedClusterTrackEnergy);
+ iterateOnce(mapSeedsToThresholds, mapHitsToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapPhotonClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, targetMap, seeds, mips, clumps, skeletonClusters, seedClusterTrackEnergy);
+ printStatus("After seventh pass:", seeds, seedClusterTrackEnergy, mapPhotonClustersToSeeds, mapSmallClustersToSeeds, mapLargeClustersToSeeds, mapMipsToSeeds, mapClumpsToSeeds, mapHitsToSeeds, clustersMatchedToTracks, mapSeedsToThresholds);
+
+ // ... now write out!
- double trackEnergy = seedClusterTrackEnergy.get(seed);
- seedMap.put(seed, wholeClsuter);
- }
- */
}
void adjustThresholds(Map<Cluster,Double> mapSeedsToThresholds,
@@ -516,7 +523,11 @@
Map<Cluster,Cluster> mapMipsToSeeds,
Map<Cluster,Cluster> mapClumpsToSeeds,
Map<List<Cluster>, Map<Cluster,Cluster>> targetMap,
- Collection<Cluster> seeds
+ Collection<Cluster> seeds,
+ List<Cluster> mips,
+ List<Cluster> clumps,
+ List<Cluster> skeletonClusters,
+ Map<Cluster,Double> seedClusterTrackEnergy
) {
// Compute scores
@@ -526,35 +537,59 @@
Map<Cluster, Map<Cluster,Double>> isolatedHitToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapHitsToSeeds.keySet(), targetMap, seeds);
Map<Cluster, Map<Cluster,Double>> smallClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapSmallClustersToSeeds.keySet(), targetMap, seeds);
- Map<Cluster, Map<Cluster,Double>> largeClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapLargeClustersToSeeds.keySet(), targetMap, seeds);
- Map<Cluster, Map<Cluster,Double>> photonClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapPhotonClustersToSeeds.keySet(), targetMap, seeds);
- Map<Cluster, Map<Cluster,Double>> mipToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapMipsToSeeds.keySet(), targetMap, seeds);
- Map<Cluster, Map<Cluster,Double>> clumpToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapClumpsToSeeds.keySet(), targetMap, seeds);
+ //Map<Cluster, Map<Cluster,Double>> photonClusterToSeedScoreMap = scoreOnProximity(const_hitScoreThreshold, const_hitScoreScale, const_hitMaxAllowedDistance, mapPhotonClustersToSeeds.keySet(), targetMap, seeds);
+ Map<Cluster, Map<Cluster,Double>> mipToSeedScoreMap = scoreOnLikelihood(mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
+ Map<Cluster, Map<Cluster,Double>> clumpToSeedScoreMap = scoreOnLikelihood(mips, clumps, targetMap, mapSeedsToThresholds, skeletonClusters);
+ Map<Cluster, Map<Cluster,Double>> largeClusterToSeedScoreMap = scoreOnProximityAndAngle(mapLargeClustersToSeeds.keySet(), mapMipsToSeeds, mapClumpsToSeeds, mipToSeedScoreMap, clumpToSeedScoreMap, mapSeedsToThresholds, targetMap, seeds);
// Update mappings based on scores
- updateMappings(isolatedHitToSeedScoreMap, mapHitsToSeeds, mapSeedsToThresholds);
- updateMappings(smallClusterToSeedScoreMap, mapSmallClustersToSeeds, mapSeedsToThresholds);
- updateMappings(largeClusterToSeedScoreMap, mapLargeClustersToSeeds, mapSeedsToThresholds);
- updateMappings(photonClusterToSeedScoreMap, mapPhotonClustersToSeeds, mapSeedsToThresholds);
- updateMappings(mipToSeedScoreMap, mapMipsToSeeds, mapSeedsToThresholds);
- updateMappings(clumpToSeedScoreMap, mapClumpsToSeeds, mapSeedsToThresholds);
+ updateMappings(isolatedHitToSeedScoreMap, mapHitsToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
+ updateMappings(smallClusterToSeedScoreMap, mapSmallClustersToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
+ updateMappings(largeClusterToSeedScoreMap, mapLargeClustersToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
+ //updateMappings(photonClusterToSeedScoreMap, mapPhotonClustersToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
+ updateMappings(mipToSeedScoreMap, mapMipsToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
+ updateMappings(clumpToSeedScoreMap, mapClumpsToSeeds, mapSeedsToThresholds, seedClusterTrackEnergy);
}
- protected void updateMappings(Map<Cluster, Map<Cluster,Double>> clusterToSeedScoreMap, Map<Cluster,Cluster> clusterToSeedMap, Map<Cluster,Double> mapSeedsToThresholds) {
+ protected void updateMappings(Map<Cluster, Map<Cluster,Double>> clusterToSeedScoreMap, Map<Cluster,Cluster> clusterToSeedMap, Map<Cluster,Double> mapSeedsToThresholds, Map<Cluster,Double> seedClusterTrackEnergy) {
Collection<Cluster> seeds = mapSeedsToThresholds.keySet();
for (Cluster clus : clusterToSeedMap.keySet()) {
if (seeds.contains(clus)) { continue; } // don't do anything silly
+
+ // Default: no match
+ clusterToSeedMap.put(clus, null);
+
double maxReducedScore = 0.0;
Cluster bestSeedMatch = null;
+ if (clus == null) { throw new AssertionError("null cluster"); }
Map<Cluster,Double> scoreMap = clusterToSeedScoreMap.get(clus);
+ if (scoreMap == null) {
+ // No good matches for this cluster
+ continue;
+ }
for (Cluster seed : scoreMap.keySet()) {
Double seedScore = scoreMap.get(seed);
Double seedThreshold = mapSeedsToThresholds.get(seed);
double reducedScore = seedScore / seedThreshold;
+ if (seedScore < 0.0 || seedScore > 1.0) { throw new AssertionError("Invalid score of "+seedScore); }
if (reducedScore > 1.0) {
if (bestSeedMatch==null || reducedScore > maxReducedScore) {
- maxReducedScore = reducedScore;
- bestSeedMatch = seed;
+ // Last-ditch sanity check
+ List<Cluster> seedPlusCandidate = new Vector<Cluster>();
+ seedPlusCandidate.add(seed);
+ seedPlusCandidate.add(clus);
+ double trackEnergy = seedClusterTrackEnergy.get(seed);
+ double clusterEnergy = energy(seedPlusCandidate);
+ double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
+ if (clusterEnergy - trackEnergy > 2.0*trackEnergySigma) {
+ // BZZT -- reject this one as silly
+ //System.out.println("DEBUG: Rejected a match from a cluster with "+clus.getCalorimeterHits().size()+" hits to seed with "+seed.getCalorimeterHits().size()+" with score = "+seedScore+" / "+seedThreshold+" because combined energy would be "+clusterEnergy+" but track energy is only "+trackEnergy);
+ } else {
+ // Sensible-ish...
+ maxReducedScore = reducedScore;
+ bestSeedMatch = seed;
+ //System.out.println("DEBUG: Found a match from cluster with "+clus.getCalorimeterHits().size()+" hits to seed with "+seed.getCalorimeterHits().size()+" with score = "+seedScore+" / "+seedThreshold);
+ }
}
}
}
@@ -562,6 +597,273 @@
}
}
+ protected Map<Cluster, Map<Cluster,Double>> scoreOnLikelihood(List<Cluster> mips, List<Cluster> clumps, Map<List<Cluster>, Map<Cluster,Cluster>> targetMap, Map<Cluster,Double> mapSeedsToThresholds, List<Cluster> skeletonClusters)
+ {
+ // Output is a map from each mip/clump to a scoremap
+ // where the scoremap is a map from each seed to a score.
+ Map<Cluster, Map<Cluster,Double>> outputMap = new HashMap<Cluster, Map<Cluster,Double>>();
+
+ // First, build up databank of all mip & clump pairings...
+ Map<Link,Double> potentialTrackToTrackLinks = new HashMap<Link,Double>();
+ Map<Link,Double> potentialTrackToClumpLinks = new HashMap<Link,Double>();
+ Map<Link,Double> potentialClumpToClumpLinks = new HashMap<Link,Double>();
+ Map<Link,Double> potentialLinks = new HashMap<Link,Double>();
+
+ double scaleFactorTrackToTrack = 1.0;
+ double scaleFactorTrackToClump = 1.0;
+ double scaleFactorClumpToClump = 1.0;
+
+ for (int i=0; i<mips.size(); i++) {
+ Cluster clus1 = mips.get(i);
+ // First, Track-Track links (being careful not to double-count)
+ for (int j=i+1; j<mips.size(); j++) {
+ Cluster clus2 = mips.get(j);
+ if (!checkIfSkeletonComponentsClose(skeletonClusters, clus1, clus2)) {
+ // Too far apart => ignore
+ continue;
+ }
+ Link newLink = new Link(clus1,clus2);
+ double likelihood = m_eval.getLinkLikelihoodTrackToTrack(clus1,clus2);
+ double score = likelihood / scaleFactorTrackToTrack;
+ if (score > 1.0) { score = 1.0; }
+ if (score < 0.0) { score = 0.0; }
+ potentialTrackToTrackLinks.put(newLink, new Double(score));
+ potentialLinks.put(newLink, new Double(score));
+ //System.out.println("DEBUG: Init link from MIP ("+clus1.getCalorimeterHits().size()+" hits) to MIP ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToTrack+" = "+score);
+ }
+ // Next: Track-Clump links
+ for (Cluster clus2 : clumps) {
+ if (!checkIfSkeletonComponentsClose(skeletonClusters, clus1, clus2)) {
+ // Too far apart => ignore
+ continue;
+ }
+ Link newLink = new Link(clus1,clus2);
+ double likelihood = m_eval.getLinkLikelihoodTrackToClump(clus1,clus2);
+ double score = likelihood / scaleFactorTrackToClump;
+ if (score > 1.0) { score = 1.0; }
+ if (score < 0.0) { score = 0.0; }
+ potentialTrackToClumpLinks.put(newLink, new Double(score));
+ potentialLinks.put(newLink, new Double(score));
+ //System.out.println("DEBUG: Init link from MIP ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorTrackToClump+" = "+score);
+ }
+ }
+ // Finally: Clump-Clump links, again being careful of double-counting
+ for (int i=0; i<clumps.size(); i++) {
+ Cluster clus1 = clumps.get(i);
+ for (int j=i+1; j<clumps.size(); j++) {
+ Cluster clus2 = clumps.get(j);
+ if (!checkIfSkeletonComponentsClose(skeletonClusters, clus1, clus2)) {
+ // Too far apart => ignore
+ continue;
+ }
+ Link newLink = new Link(clus1,clus2);
+ double likelihood = m_eval.getLinkLikelihoodClumpToClump(clus1,clus2);
+ double score = likelihood / scaleFactorClumpToClump;
+ if (score > 1.0) { score = 1.0; }
+ if (score < 0.0) { score = 0.0; }
+ potentialClumpToClumpLinks.put(newLink, new Double(score));
+ potentialLinks.put(newLink, new Double(score));
+ //System.out.println("DEBUG: Init link from CLUMP ("+clus1.getCalorimeterHits().size()+" hits) to CLUMP ("+clus2.getCalorimeterHits().size()+" hits) with likelihood score = "+likelihood+" / "+scaleFactorClumpToClump+" = "+score);
+ }
+ }
+
+ // For convenience, make list of all components:
+ List<Cluster> mipsAndClumps = new Vector<Cluster>();
+ mipsAndClumps.addAll(mips);
+ mipsAndClumps.addAll(clumps);
+
+ // Now we need to figure out what the best possible score is for a
+ // route from an arbitrary component to each seed. Sadly, I think this
+ // may be the Travelling Salesman problem.
+ //
+ // What if seed is neither a mip nor a clump?
+ // Find nearest mip/clump to seed and link it. (In the case that seed is a
+ // mip/clump, this is the same object.) Then give an overall score factor
+ // to take this into account (factor is 1 if seed is a mip/clump).
+
+ for (Cluster seed : mapSeedsToThresholds.keySet()) {
+ Double threshold = mapSeedsToThresholds.get(seed);
+
+ double initialHopScore = 0.0;
+ Cluster nearestComponent = null;
+ if (mips.contains(seed) || clumps.contains(seed)) {
+ nearestComponent = seed;
+ initialHopScore = 1.0;
+ } else {
+ double minProximity = 0.0;
+ double x0 = 20.0; // const_hitScoreThreshold;
+ double x1 = x0 + 40.0; // const_hitScoreScale;
+ double b = (1.41421*x0 - x1) / (-2.41421);
+ double a = (x0+b)*(x0+b);
+ for (Cluster component : mipsAndClumps) {
+ double dist = proximity(component, seed);
+ if (nearestComponent == null || dist < minProximity) {
+ minProximity = dist;
+ nearestComponent = component;
+ }
+ }
+ initialHopScore = a / ((minProximity+b)*(minProximity+b));
+ if (initialHopScore < 0.0) { initialHopScore = 0.0; }
+ if (initialHopScore > 1.0) { initialHopScore = 1.0; }
+ }
+
+ // OK, now figure out score to reach arbitrary other point.
+ Path zeroHopPath = new Path();
+ zeroHopPath.addStep(nearestComponent);
+ Map<Path,Double> zeroHopScores = new HashMap<Path,Double>();
+ zeroHopScores.put(zeroHopPath, new Double(initialHopScore));
+ Map<Path,Double> previousMap = zeroHopScores;
+
+ List<Map<Path,Double>> routes = new Vector<Map<Path,Double>>();
+ while (true) {
+ Map<Path,Double> newMap = addHop(previousMap, potentialLinks, threshold);
+ if (newMap.keySet().size()>0) {
+ routes.add(newMap);
+ previousMap = newMap;
+ } else {
+ // Can't add any more steps
+ break;
+ }
+ }
+
+ Map<Cluster,Double> scoreForEachDestination = new HashMap<Cluster,Double>();
+ for (Map<Path,Double> route : routes) {
+ for (Path currentPath : route.keySet()) {
+ Cluster dest = currentPath.finalStep();
+ Double previousScore = scoreForEachDestination.get(dest);
+ Double newScore = route.get(currentPath);
+ if (previousScore==null || newScore>previousScore) {
+ scoreForEachDestination.put(dest, newScore);
+ if (dest == null) { throw new AssertionError("Null dest"); }
+ if (newScore == null) { throw new AssertionError("Null score"); }
+ //System.out.println("DEBUG: Adding a transient likelihood link from a cluster with "+dest.getCalorimeterHits().size()+" to seed with "+currentPath.initialStep().getCalorimeterHits().size()+" hits... link took "+currentPath.getSteps().size()+" steps (including first & last clusters in chain)... score="+newScore);
+ }
+ }
+ }
+ for (Cluster destination : scoreForEachDestination.keySet()) {
+ if (destination == null) { throw new AssertionError("Null destination"); }
+ Map<Cluster,Double> scoreMapForThisDestination = outputMap.get(destination);
+ if (scoreMapForThisDestination == null) {
+ scoreMapForThisDestination = new HashMap<Cluster,Double>();
+ outputMap.put(destination, scoreMapForThisDestination);
+ }
+ if (seed == null) { throw new AssertionError("Null seed"); }
+ Double score = scoreForEachDestination.get(destination);
+ if (score == null) { throw new AssertionError("Null score"); }
+ scoreMapForThisDestination.put(seed, score);
+ //System.out.println("DEBUG: Added a likelihood link from a component with "+destination.getCalorimeterHits().size()+" hits to seed with "+seed.getCalorimeterHits().size()+" with a score of "+score);
+ }
+ }
+
+ return outputMap;
+ }
+
+ protected Map<Cluster, Map<Cluster,Double>> scoreOnProximityAndAngle(Collection<Cluster> inputClusters,
+ Map<Cluster,Cluster> mapMipsToSeeds,
+ Map<Cluster,Cluster> mapClumpsToSeeds,
+ Map<Cluster, Map<Cluster,Double>> mipToSeedScoreMap,
+ Map<Cluster, Map<Cluster,Double>> clumpToSeedScoreMap,
+ Map<Cluster, Double> mapSeedsToThresholds,
+ Map<List<Cluster>, Map<Cluster,Cluster>> targetMap,
+ Collection<Cluster> seeds
+)
+ {
+ // Three pieces to the score:
+ // 1) Proximity from cluster to skeleton component
+ // 2) Score to get from skeleton component to seed
+ // 3) Angular score from seed to cluster
+
+ // Here's what we'll write out...
+ Map<Cluster, Map<Cluster,Double>> outputMap = new HashMap<Cluster, Map<Cluster,Double>>();
+
+ // Here are some constants...
+ double const_hitScoreThreshold = 20.0;
+ double const_hitScoreScale = 40.0;
+ double const_hitMaxAllowedDistance = 200.0;
+ double x0 = const_hitScoreThreshold;
+ double x1 = x0 + const_hitScoreScale;
+ double b = (1.41421*x0 - x1) / (-2.41421);
+ double a = (x0+b)*(x0+b);
+
+ for (Cluster clus : inputClusters) {
+ if (seeds.contains(clus)) { continue; } // don't do anything silly
+ Map<Cluster, Double> scoresForThisCluster = new HashMap<Cluster,Double>(); // for this destination, a map from seed to score
+ for (List<Cluster> targetList : targetMap.keySet()) {
+ Map<Cluster,Cluster> individualTargetMap = targetMap.get(targetList);
+ boolean isMip = (individualTargetMap == mapMipsToSeeds);
+ boolean isClump = (individualTargetMap == mapClumpsToSeeds);
+ if (!isMip && !isClump) {
+ // Only look at mips & clumps
+ continue;
+ }
+ if (isMip && isClump) { throw new AssertionError("failure"); }
+ for (Cluster target : targetList) {
+ // Get proximity score
+ double dist = proximity(clus, target);
+ if (dist > const_hitMaxAllowedDistance) { continue; }
+ double proximityScore = a / ((dist + b)*(dist + b));
+ if (proximityScore > 1.0) {
+ if (dist > const_hitScoreThreshold*1.01) {
+ System.out.println("DEBUG: Dist="+dist+", a="+a+", b="+b+", so score="+proximityScore);
+ double score_x0 = a / ((x0+b)*(x0+b));
+ double score_x1 = a / ((x1+b)*(x1+b));
+ System.out.println("DEBUG: Score("+x0+") = "+score_x0+", Score("+x1+") = "+score_x1);
+ throw new AssertionError("Internal consistency failure");
+ }
+ proximityScore = 1.0;
+ }
+ if (proximityScore < 0.0) { proximityScore = 0.0; }
+ Cluster seedOfTarget = targetMap.get(targetList).get(target);
+ if (seedOfTarget == null) {
+ // This one points nowhere
+ continue;
+ }
+
+ // Now get likelihood score
+ Double likelihoodScore = null;
+ Map<Cluster,Double> likelihoodScoreMap = null;
+ if (isMip) {
+ likelihoodScoreMap = mipToSeedScoreMap.get(target);
+ } else {
+ likelihoodScoreMap = clumpToSeedScoreMap.get(target);
+ }
+ if (likelihoodScoreMap == null) {
+ // No score above threshold
+ continue;
+ } else {
+ likelihoodScore = likelihoodScoreMap.get(seedOfTarget);
+ }
+ if (likelihoodScore == null) {
+ // No score above threshold
+ continue;
+ }
+ if (likelihoodScore > 1.0 || likelihoodScore < 0.0) { throw new AssertionError("Invalid score: "+likelihoodScore); }
+
+ // Now get angular score
+ Hep3Vector seedPosition = new BasicHep3Vector(seedOfTarget.getPosition()); // FIXME
+ Hep3Vector clusterPosition = new BasicHep3Vector(clus.getPosition()); // FIXME
+ Hep3Vector displacementVector = VecOp.sub(clusterPosition, seedPosition);
+ double dotProductScore = VecOp.dot(VecOp.unit(seedPosition), VecOp.unit(displacementVector));
+ if (dotProductScore < 0.0) { dotProductScore = 0.0; }
+ if (dotProductScore > 1.0) { throw new AssertionError("Invalid score: "+dotProductScore); }
+
+ // Combined score
+ double combinedScore = proximityScore * likelihoodScore * dotProductScore;
+ if (combinedScore < 0.0 || combinedScore > 1.0) { throw new AssertionError("Invalid score: "+combinedScore); }
+
+ Double previousScore = scoresForThisCluster.get(seedOfTarget);
+ if (previousScore == null || previousScore < combinedScore) {
+ scoresForThisCluster.put(seedOfTarget, new Double(combinedScore));
+ //System.out.println("Added a proximity+angle link from a cluster with "+clus.getCalorimeterHits().size()+" to a seed with "+seedOfTarget.getCalorimeterHits().size()+" via a proxy with "+target.getCalorimeterHits().size()+" with score = "+proximityScore+" * "+likelihoodScore+" * "+dotProductScore+" = "+combinedScore);
+ }
+ }
+ }
+ outputMap.put(clus, scoresForThisCluster);
+ }
+ return outputMap;
+ }
+
+
protected Map<Cluster, Map<Cluster,Double>> scoreOnProximity(double const_hitScoreThreshold, double const_hitScoreScale, double const_hitMaxAllowedDistance, Collection<Cluster> inputClusters, Map<List<Cluster>, Map<Cluster,Cluster>> targetMap, Collection<Cluster> seeds)
{
// Crummy inverse-square drop-off formula:
@@ -593,6 +895,7 @@
}
score = 1.0;
}
+ if (score < 0.0) { score = 0.0; }
Cluster seedOfTarget = targetMap.get(targetList).get(target);
if (seedOfTarget != null) {
Double previousScore = scoresForThisCluster.get(seedOfTarget);
@@ -827,4 +1130,81 @@
// Ugly...
DetailedNeutralHadronClusterEnergyCalculator m_calib = new DetailedNeutralHadronClusterEnergyCalculator();
+
+ private class Link {
+ public Link(Cluster clus1, Cluster clus2) {
+ c1 = clus1;
+ c2 = clus2;
+ }
+ public boolean contains(Cluster clus) {
+ return (clus==c1 || clus==c2);
+ }
+ public Cluster counterpart(Cluster clus) {
+ if (clus==c1) {
+ return c2;
+ } else if (clus==c2) {
+ return c1;
+ } else {
+ return null;
+ }
+ }
+ protected Cluster c1 = null;
+ protected Cluster c2 = null;
+ }
+
+ private class Path {
+ Vector<Cluster> m_steps;
+ public Path() { m_steps = new Vector<Cluster>(); }
+ public Cluster initialStep() { return m_steps.get(0); }
+ public Cluster finalStep() { return m_steps.get(m_steps.size()-1); }
+ public List<Cluster> getSteps() { return m_steps; }
+ public void addStep(Cluster s) { m_steps.add(s); }
+ public void addSteps(List<Cluster> l) { for (Cluster c : l) { addStep(c); } }
+ }
+
+ protected Map<Path,Double> addHop(Map<Path,Double> previousHopScoreMap, Map<Link,Double> potentialLinks, Double threshold) {
+ Map<Path,Double> newHopScores = new HashMap<Path,Double>();
+ for (Path oldPath : previousHopScoreMap.keySet()) {
+ Cluster previousStep = oldPath.finalStep();
+ for (Link l : potentialLinks.keySet()) {
+ if (l.contains(previousStep)) {
+ Cluster counterpart = l.counterpart(previousStep);
+ if (! oldPath.getSteps().contains(counterpart)) {
+ double scoreOfAllButLastHop = previousHopScoreMap.get(oldPath);
+ double scoreOfLastHop = potentialLinks.get(l);
+ if (scoreOfAllButLastHop < 0.0 || scoreOfAllButLastHop > 1.0) { throw new AssertionError("Invalid score: "+scoreOfAllButLastHop); }
+ if (scoreOfLastHop < 0.0 || scoreOfLastHop > 1.0) { throw new AssertionError("Invalid score: "+scoreOfLastHop); }
+ double score = scoreOfAllButLastHop * scoreOfLastHop;
+ if (score < 0.0 || score > 1.0) { throw new AssertionError("Invalid score: "+score); }
+ if (score > threshold) {
+ Path newPath = new Path();
+ newPath.addSteps(oldPath.getSteps());
+ newPath.addStep(counterpart);
+ newHopScores.put(newPath, new Double(score));
+ }
+ }
+ }
+ }
+ }
+ return newHopScores;
+ }
+
+ boolean checkIfSkeletonComponentsClose(List<Cluster> skeletons, Cluster clus1, Cluster clus2) {
+ boolean match = false;
+ for (Cluster skel : skeletons) {
+ List<Cluster> subClusters = skel.getClusters();
+ if (subClusters.contains(clus1) && subClusters.contains(clus2)) {
+ match = true;
+ }
+ }
+ if (match) {
+ // both in same skeleton => contiguously connected => close
+ return true;
+ } else {
+ // not in same skeleton... check if low proximity...
+ double distance = proximity(clus1, clus2);
+ boolean close = (distance < 100.0); // FIXME: Shouldn't be hard-coded
+ return close;
+ }
+ }
}