lcsim/src/org/lcsim/recon/cluster/mipfinder
diff -N NonProjectiveMipFinder.java
--- /dev/null 1 Jan 1970 00:00:00 -0000
+++ NonProjectiveMipFinder.java 26 Mar 2008 19:45:57 -0000 1.1
@@ -0,0 +1,1448 @@
+package org.lcsim.recon.cluster.mipfinder;
+
+import hep.physics.vec.*;
+import java.util.*;
+
+import org.lcsim.util.Driver;
+import org.lcsim.contrib.uiowa.NonTrivialPFA;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.loop.LCIODriver;
+import org.lcsim.util.*;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.util.hitmap.*;
+import org.lcsim.event.*;
+import org.lcsim.recon.cluster.structural.likelihood.LikelihoodEvaluatorWrapper;
+import org.lcsim.digisim.*;
+import org.lcsim.recon.cluster.mipfinder.*;
+import org.lcsim.geometry.*;
+
+/**
+ * Another class for finding MIP-like clusters.
+ * This one is designed to be able to handle non-projective
+ * and curving MIPs better.
+ *
+ * @author Mat Charles <[log in to unmask]>
+ * @version $Id: NonProjectiveMipFinder.java,v 1.1 2008/03/26 19:45:57 mcharles Exp $
+ */
+
+public class NonProjectiveMipFinder extends Driver implements Clusterer
+{
    /** When true, emit verbose diagnostics to stdout during clustering. */
    protected boolean m_debug = false;
    // Maximum allowed 3D separation between hits in adjacent layers when
    // linking hits into stubs (units are those of the hit positions --
    // presumably mm; TODO confirm against the geometry conventions).
    protected double m_separation3D;

    /**
     * @param separation3D maximum 3D separation between hits used when
     *                     linking hits across layers into stubs
     */
    public NonProjectiveMipFinder(double separation3D) {
        m_separation3D = separation3D;
    }
+
+ public List<Cluster> createClusters(List<CalorimeterHit> hits) {
+ HitMap map = new HitMap();
+ for (CalorimeterHit hit : hits) {
+ map.put(hit.getCellID(), hit);
+ }
+ return subProcess2(map, map, m_separation3D);
+ }
    /**
     * Cluster hits supplied as a map from cell ID to hit.
     * Delegates to subProcess2, using the same map as both the "accepted"
     * and the full hit collection.
     */
    public List<Cluster> createClusters(Map<Long,CalorimeterHit> map) {
        return subProcess2(map, map, m_separation3D);
    }
+
+ /** Internal routine to do the actual work of clustering. */
+ protected List<Cluster> subProcess2(Map<Long,CalorimeterHit> inputAcceptedHits, Map<Long,CalorimeterHit> inputHits, double allowedSeparation3D)
+ {
+ if (m_debug) {
+ int count = 0;
+ for (CalorimeterHit hit : inputAcceptedHits.values()) {
+ if (inputHits.values().contains(hit)) { count++; }
+ }
+ System.out.println("DEBUG: Of the "+inputAcceptedHits.size()+" inputAcceptedHits, "+count+" are in inputHits.");
+ count = 0;
+ for (CalorimeterHit hit : inputHits.values()) {
+ if (inputAcceptedHits.values().contains(hit)) { count++; }
+ }
+ System.out.println("DEBUG: Of the "+inputAcceptedHits.size()+" inputHits, "+count+" are in inputAcceptedHits.");
+ }
+
+ Map<Integer, List<CalorimeterHit>> isolatedHitsHcal = new HashMap<Integer, List<CalorimeterHit>>();
+ Map<Integer, List<CalorimeterHit>> semiIsolatedHitsHcal = new HashMap<Integer, List<CalorimeterHit>>();
+ Map<CalorimeterHit,CalorimeterHit> doubleHitMapHcal = new HashMap<CalorimeterHit,CalorimeterHit>();
+ findIsolatedAndSemiIsolatedHits(inputAcceptedHits, inputHits, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal);
+
+ List<Stub> twoLayerStubs = findTwoLayerStubs(isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D);
+ if (m_debug) { System.out.println("DEBUG: Found "+twoLayerStubs.size()+" two-layer stubs."); }
+ long timeAfterFindTwoLayerStubs = Calendar.getInstance().getTimeInMillis();
+
+ for (Stub stub : twoLayerStubs) {
+ for (int layer=stub.getInnerLayer(); layer<=stub.getOuterLayer(); layer++) {
+ Collection<CalorimeterHit> hitsInLayer = stub.getHitsInLayer(layer);
+ if (hitsInLayer != null) {
+ if (hitsInLayer.size()>1) {
+ if (hitsInLayer.size() > 2) { throw new AssertionError("Book-keeping error"); }
+ Iterator<CalorimeterHit> iter = hitsInLayer.iterator();
+ CalorimeterHit hit1 = iter.next();
+ CalorimeterHit hit2 = iter.next();
+ if (getLayer(hit1) != layer) { throw new AssertionError("Book-keeping error"); }
+ if (getLayer(hit2) != layer) { throw new AssertionError("Book-keeping error"); }
+ List<CalorimeterHit> isolatedHitsInLayer = isolatedHitsHcal.get(layer);
+ List<CalorimeterHit> semiIsolatedHitsInLayer = semiIsolatedHitsHcal.get(layer);
+ if (isolatedHitsInLayer != null && isolatedHitsInLayer.contains(hit1)) { throw new AssertionError("2 hits in layer "+layer+", but hit is marked as isolated. Stub contains "+stub.getLayersCount()+" layers."); }
+ if (isolatedHitsInLayer != null && isolatedHitsInLayer.contains(hit2)) { throw new AssertionError("2 hits in layer "+layer+", but hit is marked as isolated. Stub contains "+stub.getLayersCount()+" layers."); }
+ if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit1))) { throw new AssertionError("2 hits in layer "+layer+", but hit not is marked as semi-isolated. Stub contains "+stub.getLayersCount()+" layers."); }
+ if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit2))) { throw new AssertionError("2 hits in layer"+layer+", but hit not is marked as semi-isolated. Stub contains "+stub.getLayersCount()+" layers.");
+ }
+ CalorimeterHit doubleHitOfHit1 = doubleHitMapHcal.get(hit1);
+ CalorimeterHit doubleHitOfHit2 = doubleHitMapHcal.get(hit2);
+ if (doubleHitOfHit1 != hit2) { throw new AssertionError("2 hits in layer, but DoubleHit(hit1) != hit2"); }
+ if (doubleHitOfHit2 != hit1) { throw new AssertionError("2 hits in layer, but DoubleHit(hit2) != hit1"); }
+ }
+ }
+ }
+ }
+
+ double minimumDotProduct = 0.95;
+ List<Stub> multiLayerStubs = new Vector<Stub>();
+ Set<Stub> usedTwoLayerStubs = new HashSet<Stub>();
+ for (Stub twoLayerStub : twoLayerStubs) {
+ if (usedTwoLayerStubs.contains(twoLayerStub)) {
+ // Already included this one
+ continue;
+ }
+ Stub currentStub = twoLayerStub;
+ usedTwoLayerStubs.add(twoLayerStub);
+ while(true) {
+ List<Stub> stubCandidatesExpandingOutward = expandStubSimple(currentStub, +1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+ if (stubCandidatesExpandingOutward.size() == 1) {
+ currentStub = stubCandidatesExpandingOutward.get(0);
+ continue;
+ }
+ List<Stub> stubCandidatesExpandingInward = expandStubSimple(currentStub, -1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+ if (stubCandidatesExpandingInward.size() == 1) {
+ currentStub = stubCandidatesExpandingInward.get(0);
+ continue;
+ }
+ // Wasn't able to find an unambiguous expansion for +- 1 layer.
+ // Consider skipping a layer, provided that
+ // * we had hits in the last 3 layers
+ // * we find hits in 2+ layers after the gap
+ int outerLayer = currentStub.getOuterLayer();
+ boolean hitInOuterLayer = (currentStub.getHitsInLayer(outerLayer) != null && currentStub.getHitsInLayer(outerLayer).size()>0);
+ boolean hitInOuterLayerMinusOne = (currentStub.getHitsInLayer(outerLayer-1) != null && currentStub.getHitsInLayer(outerLayer-1).size()>0);
+ boolean hitInOuterLayerMinusTwo = (currentStub.getHitsInLayer(outerLayer-2) != null && currentStub.getHitsInLayer(outerLayer-2).size()>0);
+ if (!hitInOuterLayer) { throw new AssertionError("Book-keeping error"); }
+ if (hitInOuterLayer && hitInOuterLayerMinusOne && hitInOuterLayerMinusTwo) {
+ // Hits in last 3 layers... consider skipping one
+ // Watch out -- is the 3D separation still sensible? -- FIXME
+ List<Stub> stubCandidatesExpandingOutwardTwoLayers = expandStubSimple(currentStub, +2, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+ Stub candidateForNewStub = null;
+ if (stubCandidatesExpandingOutwardTwoLayers.size() == 1) {
+ candidateForNewStub = stubCandidatesExpandingOutwardTwoLayers.get(0);
+ }
+ if (candidateForNewStub != null) {
+ // Require another layer in the same direction
+ List<Stub> stubCandidatesExpandingOutwardAnotherLayer = expandStubSimple(candidateForNewStub, +1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+ if (stubCandidatesExpandingOutwardAnotherLayer.size() == 1) {
+ // Yes, this is valid
+ currentStub = stubCandidatesExpandingOutwardAnotherLayer.get(0);
+ continue;
+ }
+ }
+ }
+ int innerLayer = currentStub.getInnerLayer();
+ boolean hitInInnerLayer = (currentStub.getHitsInLayer(innerLayer) != null && currentStub.getHitsInLayer(innerLayer).size()>0);
+ boolean hitInInnerLayerPlusOne = (currentStub.getHitsInLayer(innerLayer+1) != null && currentStub.getHitsInLayer(innerLayer+1).size()>0);
+ boolean hitInInnerLayerPlusTwo = (currentStub.getHitsInLayer(innerLayer+2) != null && currentStub.getHitsInLayer(innerLayer+2).size()>0);
+ if (!hitInInnerLayer) { throw new AssertionError("Book-keeping error"); }
+ if (hitInInnerLayer && hitInInnerLayerPlusOne && hitInInnerLayerPlusTwo) {
+ // Hits in last 3 layers... consider skipping one
+ // Watch out -- is the 3D separation still sensible? -- FIXME
+ List<Stub> stubCandidatesExpandingInwardTwoLayers = expandStubSimple(currentStub, -2, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+ Stub candidateForNewStub = null;
+ if (stubCandidatesExpandingInwardTwoLayers.size() == 1) {
+ candidateForNewStub = stubCandidatesExpandingInwardTwoLayers.get(0);
+ }
+ if (candidateForNewStub != null) {
+ // Require another layer in the same direction
+ List<Stub> stubCandidatesExpandingInwardAnotherLayer = expandStubSimple(candidateForNewStub, -1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+ if (stubCandidatesExpandingInwardAnotherLayer.size() == 1) {
+ // Yes, this is valid
+ currentStub = stubCandidatesExpandingInwardAnotherLayer.get(0);
+ continue;
+ }
+ }
+ }
+ // Wasn't able to find an unambiguous expansion
+ break;
+ }
+ if (currentStub.getLayersCount()>2) {
+ // Extended it to a useful size -- keep this one.
+ multiLayerStubs.add(currentStub);
+
+ // Now go back and check off any 2-layer stubs inside it:
+ Collection<CalorimeterHit> allHitsInCurrentStub = currentStub.getAllHits();
+ for (Stub testTwoLayerStub : twoLayerStubs) {
+ if (usedTwoLayerStubs.contains(testTwoLayerStub)) {
+ // Already flagged
+ continue;
+ }
+ if (testTwoLayerStub.getInnerLayer() >= currentStub.getInnerLayer() && testTwoLayerStub.getOuterLayer() <= currentStub.getOuterLayer()) {
+ Collection<CalorimeterHit> allHitsInTwoLayerStub = testTwoLayerStub.getAllHits();
+ boolean exactMatch = true;
+ for (CalorimeterHit hit : allHitsInTwoLayerStub) {
+ if (!allHitsInCurrentStub.contains(hit)) {
+ // Not an exact match
+ exactMatch = false;
+ break;
+ }
+ }
+ if (exactMatch) {
+ // Flag it
+ usedTwoLayerStubs.add(testTwoLayerStub);
+ }
+ }
+ }
+ }
+ }
+
+ if (m_debug) { System.out.println("There are now "+multiLayerStubs.size()+" multi-layer stubs found. "+usedTwoLayerStubs.size()+"/"+twoLayerStubs.size()+" two-layer stubs flagged as used."); }
+
+ for (Stub stub : multiLayerStubs) {
+ for (int layer=stub.getInnerLayer(); layer<=stub.getOuterLayer(); layer++) {
+ Collection<CalorimeterHit> hitsInLayer = stub.getHitsInLayer(layer);
+ if (hitsInLayer != null) {
+ if (hitsInLayer.size()>1) {
+ if (hitsInLayer.size() > 2) { throw new AssertionError("Book-keeping error"); }
+ Iterator<CalorimeterHit> iter = hitsInLayer.iterator();
+ CalorimeterHit hit1 = iter.next();
+ CalorimeterHit hit2 = iter.next();
+ List<CalorimeterHit> isolatedHitsInLayer = isolatedHitsHcal.get(layer);
+ List<CalorimeterHit> semiIsolatedHitsInLayer = semiIsolatedHitsHcal.get(layer);
+ if (isolatedHitsInLayer!=null && isolatedHitsInLayer.contains(hit1)) { throw new AssertionError("2 hits in layer, but hit is marked as isolated."); }
+ if (isolatedHitsInLayer!=null && isolatedHitsInLayer.contains(hit2)) { throw new AssertionError("2 hits in layer, but hit is marked as isolated."); }
+ if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit1))) { throw new AssertionError("2 hits in layer, but hit not is marked as semi-isolated."); }
+ if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit2))) { throw new AssertionError("2 hits in layer, but hit not is marked as semi-isolated."); }
+ CalorimeterHit doubleHitOfHit1 = doubleHitMapHcal.get(hit1);
+ CalorimeterHit doubleHitOfHit2 = doubleHitMapHcal.get(hit2);
+ if (doubleHitOfHit1 != hit2) { throw new AssertionError("2 hits in layer, but DoubleHit(hit1) != hit2"); }
+ if (doubleHitOfHit2 != hit1) { throw new AssertionError("2 hits in layer, but DoubleHit(hit2) != hit1"); }
+ }
+ }
+ }
+ }
+
+ // Look for identical stubs
+ Set<Integer> identicalStubsToPurge = new HashSet<Integer>();
+ for (int i=0; i<multiLayerStubs.size(); i++) {
+ Stub stub1 = multiLayerStubs.get(i);
+ for (int j=i+1; j<multiLayerStubs.size(); j++) {
+ Stub stub2 = multiLayerStubs.get(j);
+ if (stub1.equals(stub2)) {
+ identicalStubsToPurge.add(j);
+ if (m_debug) { System.out.println("Flagged ["+j+"] for removal, since it exactly matches ["+i+"]"); }
+ }
+ }
+ }
+ List<Stub> nonIdenticalMultiLayerStubs = new Vector<Stub>();
+ for (int i=0; i<multiLayerStubs.size(); i++) {
+ if (! identicalStubsToPurge.contains(i) ) {
+ Stub stub = multiLayerStubs.get(i);
+ nonIdenticalMultiLayerStubs.add(stub);
+ } else {
+ if (m_debug) { System.out.println("Removed ["+i+"]"); }
+ }
+ }
+
+ if (m_debug) { System.out.println("After removing identical stubs, there are "+nonIdenticalMultiLayerStubs.size()+" multi-layer stubs left."); }
+
+ // Look for near-identical stubs
+ List<Stub> uniqueMultiLayerStubs = new Vector<Stub>();
+ {
+ Map<Stub,Set<Stub>> mapInputToOutputStubs = new HashMap<Stub,Set<Stub>>();
+ for (Stub stub : nonIdenticalMultiLayerStubs) {
+ Set<Stub> tmpList = new HashSet<Stub>();
+ tmpList.add(stub);
+ mapInputToOutputStubs.put(stub,tmpList);
+ }
+ for (int i=0; i<nonIdenticalMultiLayerStubs.size(); i++) {
+ Stub stub1 = nonIdenticalMultiLayerStubs.get(i);
+ for (int j=i+1; j<nonIdenticalMultiLayerStubs.size(); j++) {
+ Stub stub2 = nonIdenticalMultiLayerStubs.get(j);
+ if (stub1.equalsAllowingForSkippedLayersAndDoubleHits(stub2)) {
+ // Need to merge them.
+ Set<Stub> stub1Target = mapInputToOutputStubs.get(stub1);
+ Set<Stub> stub2Target = mapInputToOutputStubs.get(stub2);
+ Set<Stub> mergedTarget = new HashSet<Stub>();
+ mergedTarget.addAll(stub1Target);
+ mergedTarget.addAll(stub2Target);
+ for (Stub targetStub : mergedTarget) {
+ mapInputToOutputStubs.put(targetStub, mergedTarget);
+ }
+ }
+ }
+ }
+ for (Stub stub : mapInputToOutputStubs.keySet()) {
+ Set<Stub> targets = mapInputToOutputStubs.get(stub);
+ if (!targets.contains(stub)) { throw new AssertionError("Book-keeping error"); }
+ }
+ for (Set<Stub> targets : mapInputToOutputStubs.values()) {
+ for (Stub target : targets) {
+ Set<Stub> targetOfTarget = mapInputToOutputStubs.get(target);
+ if (targetOfTarget != targets) { throw new AssertionError("Book-keeping error"); }
+ }
+ }
+ Set<Stub> alreadyCountedStubs = new HashSet<Stub>();
+ for (Stub stub : mapInputToOutputStubs.keySet()) {
+ if (alreadyCountedStubs.contains(stub)) {
+ continue;
+ } else {
+ Set<Stub> targets = mapInputToOutputStubs.get(stub);
+ if (targets.size() < 1) {
+ throw new AssertionError("Book-keeping error");
+ } else if (targets.size() == 1) {
+ uniqueMultiLayerStubs.addAll(targets);
+ } else {
+ Stub mergedStub = mergeStubs(targets);
+ uniqueMultiLayerStubs.add(mergedStub);
+ }
+ alreadyCountedStubs.addAll(targets);
+ }
+ }
+ }
+
+ if (m_debug) { System.out.println("After removing near-identical stubs, there are "+uniqueMultiLayerStubs.size()+" multi-layer stubs left."); }
+
+ // Check for subsets
+ // If Stub1 is an exact subset of Stub2, we'll simply discard Stub1.
+ // Note that this is trickier if it's not an exact subset (e.g. single
+ // vs double hit), but we don't cover that case here.
+ List<Stub> uniqueMultiLayerStubsNoSubsets = new Vector<Stub>();
+ {
+ Set<Integer> stubsToDiscard = new HashSet<Integer>();
+ for (int i=0; i<uniqueMultiLayerStubs.size(); i++) {
+ Stub stub1 = uniqueMultiLayerStubs.get(i);
+ for (int j=i+1; j<uniqueMultiLayerStubs.size(); j++) {
+ Stub stub2 = uniqueMultiLayerStubs.get(j);
+ boolean stub1IsSubsetOfStub2 = stub1.isSubsetOfOrEquals(stub2);
+ boolean stub2IsSubsetOfStub1 = stub2.isSubsetOfOrEquals(stub1);
+ if (stub2IsSubsetOfStub1) {
+ if (m_debug) { System.out.println("DEBUG: Stub2 [j="+j+"] covering layers "+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()+" is a subset of Stub1[i="+i+"] covering layers "+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()); }
+ stubsToDiscard.add(j);
+ }
+ if (stub1IsSubsetOfStub2 && !stub2IsSubsetOfStub1) {
+ if (m_debug) { System.out.println("DEBUG: Stub1 [i="+i+"] covering layers "+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+" is a subset of Stub2[j="+j+"] covering layers "+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()); }
+ stubsToDiscard.add(i);
+ }
+ }
+ }
+ for (int i=0; i<uniqueMultiLayerStubs.size(); i++) {
+ if (!stubsToDiscard.contains(i)) {
+ Stub stub = uniqueMultiLayerStubs.get(i);
+ uniqueMultiLayerStubsNoSubsets.add(stub);
+ }
+ }
+ }
+
+ if (m_debug) { System.out.println("After removing subset stubs, there are "+uniqueMultiLayerStubsNoSubsets.size()+" multi-layer stubs left."); }
+
+ // Now check for near-subsets (allowing for double hits)
+ List<Stub> uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits = new Vector<Stub>();
+ {
+ Map<Integer, Set<Integer>> mergeTargets = new HashMap<Integer, Set<Integer>>();
+ for (int i=0; i<uniqueMultiLayerStubsNoSubsets.size(); i++) {
+ Stub stub1 = uniqueMultiLayerStubsNoSubsets.get(i);
+ for (int j=i+1; j<uniqueMultiLayerStubsNoSubsets.size(); j++) {
+ Stub stub2 = uniqueMultiLayerStubsNoSubsets.get(j);
+ boolean stub1IsSubsetOfStub2 = stub1.isSubsetOfOrEqualsAllowingForDoubleHits(stub2);
+ boolean stub2IsSubsetOfStub1 = stub2.isSubsetOfOrEqualsAllowingForDoubleHits(stub1);
+ if (stub2IsSubsetOfStub1 || stub1IsSubsetOfStub2) {
+ Set<Integer> targetsOfStub1 = mergeTargets.get(stub1);
+ Set<Integer> targetsOfStub2 = mergeTargets.get(stub2);
+ Set<Integer> combinedTargets = new HashSet<Integer>();
+ if (targetsOfStub1 != null) {
+ combinedTargets.addAll(targetsOfStub1);
+ }
+ if (targetsOfStub2 != null) {
+ combinedTargets.addAll(targetsOfStub2);
+ }
+ combinedTargets.add(i);
+ combinedTargets.add(j);
+ for (Integer k : combinedTargets) {
+ mergeTargets.put(k, combinedTargets);
+ }
+ }
+ }
+ }
+ // First handle the easy cases:
+ Set<Integer> mergeTargetsKeySet = mergeTargets.keySet();
+ for (int i=0; i<uniqueMultiLayerStubsNoSubsets.size(); i++) {
+ Stub stub = uniqueMultiLayerStubsNoSubsets.get(i);
+ if ( ! mergeTargetsKeySet.contains(i) ) {
+ // This one wasn't attached to anything else -- just add it
+ uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.add(stub);
+ }
+ }
+ // Now go back and do the ones that need merging, being
+ // careful not to double-count:
+ Set<Integer> usedStubs = new HashSet<Integer>();
+ for (Integer i : mergeTargetsKeySet) {
+ if (usedStubs.contains(i)) {
+ continue;
+ }
+ Set<Integer> targets = mergeTargets.get(i);
+ Set<Stub> targetStubs = new HashSet<Stub>();
+ for (Integer j : targets) {
+ targetStubs.add(uniqueMultiLayerStubsNoSubsets.get(j));
+ }
+ Stub mergedStub = mergeStubs(targetStubs);
+ uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.add(mergedStub);
+ usedStubs.addAll(targets);
+ }
+ }
+
+ if (m_debug) { System.out.println("After removing subset stubs (allowing for double-hits), there are "+uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.size()+" multi-layer stubs left."); }
+
+ // Look for overlaps...
+ for (int i=0; i<uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.size(); i++) {
+ Stub stub1 = uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.get(i);
+ if (stub1.getLayersCount() > 3) {
+ Collection<CalorimeterHit> allHitsInStub1 = stub1.getAllHits();
+ for (int j=0; j<uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.size(); j++) {
+ if (i == j) { continue; }
+ Stub stub2 = uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.get(j);
+ Collection<CalorimeterHit> allHitsInStub2 = stub2.getAllHits();
+ boolean overlap = false;
+ for (CalorimeterHit hit : allHitsInStub2) {
+ if (allHitsInStub1.contains(hit)) {
+ overlap = true;
+ break;
+ }
+ }
+ if (overlap) {
+ String printme = new String("Overlap between stub["+i+"] with "+stub1.getLayersCount()+" layers ("+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+") and stub["+j+"] with "+stub2.getLayersCount()+" layers ("+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()+")\n");
+ int innerLayer = stub1.getInnerLayer();
+ int outerLayer = stub1.getOuterLayer();
+ if (stub2.getInnerLayer() < innerLayer) { innerLayer = stub2.getInnerLayer(); }
+ if (stub2.getOuterLayer() > outerLayer) { outerLayer = stub2.getOuterLayer(); }
+ boolean foundHitInStub1ButNotStub2 = false;
+ boolean foundHitInStub2ButNotStub1 = false;
+ boolean foundLayerWhereStubsHaveNonOverlappingHits = false;
+ for (int layer=innerLayer; layer<=outerLayer; layer++) {
+ Collection<CalorimeterHit> stub1HitsInLayer = stub1.getHitsInLayer(layer);
+ Collection<CalorimeterHit> stub2HitsInLayer = stub2.getHitsInLayer(layer);
+ // Check some stuff
+ boolean flagLayerGoodMatch = false;
+ boolean flagLayerBadMatch = false;
+ if (stub1HitsInLayer != null && stub1HitsInLayer.size() > 0 && stub2HitsInLayer == null) {
+ foundHitInStub1ButNotStub2 = true;
+ } else if (stub2HitsInLayer != null && stub2HitsInLayer.size() > 0 && stub1HitsInLayer == null) {
+ foundHitInStub2ButNotStub1 = true;
+ } else if (stub1HitsInLayer == null && stub2HitsInLayer == null) {
+ // This can happen if both skipped that layer
+ printme += "[debug: both no hits/layer skip for layer "+layer+"]\n";
+ //throw new AssertionError("Book-keeping error");
+ } else if (stub2HitsInLayer != null && stub2HitsInLayer.size() > 0 && stub1HitsInLayer != null && stub1HitsInLayer.size() > 0) {
+ // Both have hits in this layer
+ boolean foundOverlapInThisLayer = false;
+ for (CalorimeterHit hit : stub1HitsInLayer) {
+ if (stub2HitsInLayer.contains(hit)) {
+ foundOverlapInThisLayer = true;
+ }
+ }
+ if (foundOverlapInThisLayer) {
+ flagLayerGoodMatch = true;
+ } else {
+ foundLayerWhereStubsHaveNonOverlappingHits = true;
+ foundHitInStub1ButNotStub2 = true;
+ foundHitInStub2ButNotStub1 = true;
+ flagLayerBadMatch = true;
+ }
+ } else {
+ throw new AssertionError("Book-keeping error");
+ }
+ printme += " * L"+layer+" ";
+ printme += "Stub1=[";
+ if (stub1HitsInLayer == null) {
+ printme += "empty";
+ } else {
+ for (CalorimeterHit hit : stub1HitsInLayer) {
+ printme += hit.getCellID() + "/";
+ }
+ }
+ printme += "], Stub2=[";
+ if (stub2HitsInLayer == null) {
+ printme += "empty";
+ } else {
+ for (CalorimeterHit hit : stub2HitsInLayer) {
+ printme += hit.getCellID() + "/";
+ }
+ }
+ if (flagLayerGoodMatch) {
+ printme += " -- good match";
+ }
+ if (flagLayerBadMatch) {
+ printme += " -- doesn't match";
+ }
+ printme += "]\n";
+ }
+ boolean interestingFlag = false;
+ if (foundHitInStub1ButNotStub2 && !foundHitInStub2ButNotStub1) {
+ interestingFlag = true;
+ printme += " --> Looks like Stub2 is a subset of Stub1.\n";
+ } else if (!foundHitInStub1ButNotStub2 && foundHitInStub2ButNotStub1) {
+ interestingFlag = true;
+ printme += " --> Looks like Stub1 is a subset of Stub2.\n";
+ } else if (!foundLayerWhereStubsHaveNonOverlappingHits) {
+ interestingFlag = true;
+ printme += " --> Looks like they could be compatible.";
+ printme += " Flags: equals=";
+ printme += (stub1.equals(stub2));
+ printme += ", near-equals=";
+ printme += (stub1.equalsAllowingForSkippedLayersAndDoubleHits(stub2));
+ printme += "Reverse flags: equals=";
+ printme += (stub2.equals(stub1));
+ printme += ", near-equals=";
+ printme += (stub2.equalsAllowingForSkippedLayersAndDoubleHits(stub1));
+ printme += "\n";
+ }
+ interestingFlag = true; // Force heavy debug
+ if (interestingFlag) {
+ if (m_debug) { System.out.println(printme); }
+ }
+ }
+ }
+ }
+ }
+
+ // Now, which were the largest stubs in the event?
+ Map<Integer, Set<Stub>> mapLayerCountToStubs = new HashMap<Integer, Set<Stub>>();
+ for (Stub stub : uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits) {
+ int layerCount = stub.getLayersCount();
+ Set<Stub> stubsWithThisLayerCount = mapLayerCountToStubs.get(layerCount);
+ if (stubsWithThisLayerCount == null) {
+ stubsWithThisLayerCount = new HashSet<Stub>();
+ mapLayerCountToStubs.put(layerCount, stubsWithThisLayerCount);
+ }
+ stubsWithThisLayerCount.add(stub);
+ }
+ List<Integer> layerCountList = new Vector<Integer>();
+ layerCountList.addAll(mapLayerCountToStubs.keySet());
+ Collections.sort(layerCountList, Collections.reverseOrder());
+ List<Stub> sortedStubs = new Vector<Stub>();
+ for (Integer i : layerCountList) {
+ Set<Stub> stubs = mapLayerCountToStubs.get(i);
+ sortedStubs.addAll(stubs);
+ }
+
+ // Loop over stubs & create final output list
+ List<Stub> finalOutputStubs = new Vector<Stub>();
+ Set<Integer> vetoedStubs = new HashSet<Integer>();
+ for (int i=0; i<sortedStubs.size(); i++) {
+ if (vetoedStubs.contains(i)) { continue; }
+ Stub stub1 = sortedStubs.get(i);
+ if (stub1.getLayersCount() < 4) {
+ // Too small -- skip. [Could probably just break here]
+ continue;
+ }
+ Collection<CalorimeterHit> allHitsInStub1 = stub1.getAllHits();
+ if (m_debug) { System.out.println(i+": Stub with "+stub1.getLayersCount()+" layers ("+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+")"); }
+ Set<Integer> overlappingCompatibleStubs = new HashSet<Integer>();
+ for (int j=i+1; j<sortedStubs.size(); j++) {
+ if (vetoedStubs.contains(j)) { continue; }
+ Stub stub2 = sortedStubs.get(j);
+ if (stub1.overlaps(stub2)) {
+ String printme = new String("Overlap between stub["+i+"] with "+stub1.getLayersCount()+" layers ("+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+") and stub["+j+"] with "+stub2.getLayersCount()+" layers ("+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()+")");
+ if (stub1.getInnerLayer() > stub2.getInnerLayer() || stub1.getOuterLayer() < stub2.getOuterLayer()) {
+ if (stub1.compatible(stub2, doubleHitMapHcal)) {
+ printme += " -- compatible!";
+ overlappingCompatibleStubs.add(j);
+ }
+ } else {
+ printme += " -- enclosed";
+ }
+ if (m_debug) { System.out.println(printme); }
+ vetoedStubs.add(j);
+ }
+ }
+ // Now, we consider merging some of the overlapping,
+ // compatible stubs in. We basically look at two cases:
+ // (1) Large overlapping, compatible stubs. Here the
+ // requirement is at least 4 non-overlapping layers.
+ // These take priority (starting with the ones with the
+ // most non-overlapping layers).
+ // (2) Small overlapping, compatible stubs. Here the
+ // requirement is at least 2 overlapping layers. We
+ // also require no ambiguity -- i.e. if there are
+ // multiple ones that could be attached but which are
+ // not mutually compatible, we don't use any of them.
+ Map<Integer,Integer> mapIndexToNumNonOverlappingLayers = new HashMap<Integer,Integer>();
+ List<Integer> largeStubs = new Vector<Integer>();
+ List<Integer> smallStubs = new Vector<Integer>();
+ for (Integer j : overlappingCompatibleStubs) {
+ Stub otherStub = sortedStubs.get(j);
+ int countNonOverlappingLayers = 0;
+ int countOverlappingLayers = 0;
+ for (int layer = otherStub.getInnerLayer(); layer <= otherStub.getOuterLayer(); layer++) {
+ if (otherStub.getHitsInLayer(layer) != null && otherStub.getHitsInLayer(layer).size() > 0) {
+ if (stub1.getHitsInLayer(layer) == null || stub1.getHitsInLayer(layer).size()==0) {
+ countNonOverlappingLayers++;
+ } else {
+ countOverlappingLayers++;
+ }
+ }
+ }
+ mapIndexToNumNonOverlappingLayers.put(j, countNonOverlappingLayers);
+ if (countNonOverlappingLayers >= 4) {
+ largeStubs.add(j);
+ } else if (countNonOverlappingLayers >= 1 && countOverlappingLayers >= 2) {
+ smallStubs.add(j);
+ }
+ }
+ // Sort list of large stubs by size (lazy coding)
+ for (int iSort = 0; iSort < largeStubs.size(); iSort++) {
+ Integer iSortIndex = largeStubs.get(iSort);
+ for (int jSort = iSort+1; jSort < largeStubs.size(); jSort++) {
+ Integer jSortIndex = largeStubs.get(jSort);
+ if (mapIndexToNumNonOverlappingLayers.get(jSortIndex) > mapIndexToNumNonOverlappingLayers.get(iSortIndex)) {
+ largeStubs.set(iSort, jSortIndex);
+ largeStubs.set(jSort, iSortIndex);
+ }
+ }
+ }
+ // Find & accept, starting with large stubs:
+ Set<Integer> acceptedMerges = new HashSet<Integer>();
+ for (Integer overlappingLargeStubIndex : largeStubs) {
+ // Has this one already been excluded?
+ boolean excluded = false;
+ Stub overlappingStub = sortedStubs.get(overlappingLargeStubIndex);
+ for (Integer acceptedMergeIndex : acceptedMerges) {
+ Stub acceptedMergeStub = sortedStubs.get(acceptedMergeIndex);
+ for (int layer=overlappingStub.getInnerLayer(); layer <= overlappingStub.getOuterLayer(); layer++) {
+ if (acceptedMergeStub.layerEquals(overlappingStub, layer)) {
+ // That's OK -- both are the same in that layer
+ } else if (stub1.layerEqualsAllowingForDoubleHits(overlappingStub, layer)) {
+ // That's OK -- it's part of the main stub
+ } else {
+ // Not OK
+ excluded = true;
+ break;
+ }
+ }
+ if (excluded) { break; }
+ }
+ if (!excluded) {
+ // OK to use this one
+ acceptedMerges.add(overlappingLargeStubIndex);
+ }
+ }
+ // Now move to the small stubs, being even more picky:
+ for (Integer overlappingSmallStubIndex : smallStubs) {
+ boolean excluded = false;
+ Stub overlappingStub = sortedStubs.get(overlappingSmallStubIndex);
+ for (Integer overlappingLargeStubIndex : largeStubs) {
+ Stub largeStub = sortedStubs.get(overlappingLargeStubIndex);
+ for (int layer=overlappingStub.getInnerLayer(); layer <= overlappingStub.getOuterLayer(); layer++) {
+ if (stub1.layerEqualsAllowingForDoubleHits(overlappingStub, layer)) {
+ // That's OK -- it's part of the main stub
+ } else if (largeStub.getHitsInLayer(layer)==null || largeStub.getHitsInLayer(layer).size()==0) {
+ // That's OK -- no hits in that layer in the other stub
+ } else {
+ excluded = true;
+ break;
+ }
+ }
+ if (excluded) { break; }
+ }
+ for (Integer otherOverlappingSmallStubIndex : smallStubs) {
+ if (overlappingSmallStubIndex == otherOverlappingSmallStubIndex) { continue; }
+ Stub otherSmallStub = sortedStubs.get(otherOverlappingSmallStubIndex);
+ for (int layer=overlappingStub.getInnerLayer(); layer <= overlappingStub.getOuterLayer(); layer++) {
+ if (stub1.layerEqualsAllowingForDoubleHits(overlappingStub, layer)) {
+ // That's OK -- it's part of the main stub
+ } else if (otherSmallStub.getHitsInLayer(layer)==null || otherSmallStub.getHitsInLayer(layer).size()==0) {
+ // That's OK -- no hits in that layer in the other stub
+ } else {
+ excluded = true;
+ break;
+ }
+ }
+ if (excluded) { break; }
+ }
+ if (!excluded) { acceptedMerges.add(overlappingSmallStubIndex); }
+ }
+ Set<Stub> stubsToMerge = new HashSet<Stub>();
+ stubsToMerge.add(stub1);
+ for (Integer acceptedMerge : acceptedMerges) {
+ if (m_debug) { System.out.println(" -> Merging in accepted stub ["+acceptedMerge+"]"); }
+ stubsToMerge.add(sortedStubs.get(acceptedMerge));
+ }
+ Stub mergedStub = mergeStubs(stubsToMerge);
+ if (mergedStub.getLayersCount() >= 4) {
+ finalOutputStubs.add(mergedStub);
+ if (m_debug) { System.out.println(" -> Saved merged stub with "+mergedStub.getLayersCount()+" layers ("+mergedStub.getInnerLayer()+"-"+mergedStub.getOuterLayer()+")"); }
+ }
+ }
+
+ // Validate that each hit is only used once, purging if needed.
+ Set<Long> usedHits = new HashSet<Long>();
+ Set<Long> multiplyUsedHits = new HashSet<Long>();
+ for (Stub stub : finalOutputStubs) {
+ for (CalorimeterHit hit : stub.getAllHits()) {
+ long cellID = hit.getCellID();
+ if (usedHits.contains(cellID)) {
+ multiplyUsedHits.add(cellID);
+ }
+ usedHits.add(cellID);
+ }
+ }
+
+ // Turn stubs into clusters
+ Set<Long> finalUsedHits = new HashSet<Long>();
+ List<Cluster> finalOutputClusters = new Vector<Cluster>();
+ for (Stub stub : finalOutputStubs) {
+ BasicCluster clus = new BasicCluster();
+ for (CalorimeterHit hit : stub.getAllHits()) {
+ long cellID = hit.getCellID();
+ if ( ! multiplyUsedHits.contains(cellID) ) {
+ clus.addHit(hit);
+ finalUsedHits.add(cellID);
+ }
+ }
+ // Require >= 4 hits left.
+ // Probably we should also check for stubs that got broken apart here too...
+ if (clus.getCalorimeterHits().size()>=4) {
+ finalOutputClusters.add(clus);
+ }
+ }
+ if (m_debug) { System.out.println("Return "+finalOutputClusters.size()+" stubs. Used "+finalUsedHits.size()+" hits."); }
+ return finalOutputClusters;
+ }
+
+ protected Stub mergeStubs(Collection<Stub> inputStubs) {
+ // Be careful if modifying this -- Stub is picky about the order in which hits are added.
+ Stub mergedStub = new Stub();
+ for (Stub stub : inputStubs) {
+ for (int iLayer = stub.getInnerLayer(); iLayer <= stub.getOuterLayer(); iLayer++) {
+ Collection<CalorimeterHit> hitsInLayerInThisStub = stub.getHitsInLayer(iLayer);
+ Collection<CalorimeterHit> hitsInLayerInMergedStub = mergedStub.getHitsInLayer(iLayer);
+ if (hitsInLayerInThisStub != null) {
+ String printme = new String("DEBUG: Merge: Adding "+hitsInLayerInThisStub.size()+" hits to layer "+iLayer+" (");
+ if (hitsInLayerInMergedStub != null) {
+ printme += hitsInLayerInMergedStub.size();
+ } else {
+ printme += "0";
+ }
+ printme += " already present). New hits are:";
+ for (CalorimeterHit hit : hitsInLayerInThisStub) {
+ printme += " "+hit.getCellID();
+ }
+ }
+ if (hitsInLayerInThisStub != null) {
+ Iterator<CalorimeterHit> iter = hitsInLayerInThisStub.iterator();
+ CalorimeterHit firstHit = iter.next();
+ if (iter.hasNext()) {
+ // Two hits...
+ CalorimeterHit secondHit = iter.next();
+ if (hitsInLayerInMergedStub==null) {
+ // No hits yet in the layer -- need to add both
+ mergedStub.addNonEdgeHit(firstHit);
+ mergedStub.addDoubleHit(firstHit,secondHit);
+ } else if (hitsInLayerInMergedStub.size()==1) {
+ // Contains one... need to add the other as a double-hit
+ if (hitsInLayerInMergedStub.contains(firstHit)) {
+ mergedStub.addDoubleHit(firstHit, secondHit);
+ } else if (hitsInLayerInMergedStub.contains(secondHit)) {
+ mergedStub.addDoubleHit(secondHit, firstHit);
+ } else {
+ throw new AssertionError("Book-keeping error");
+ }
+ } else if (hitsInLayerInMergedStub.size()==2) {
+ // Contains two... should both be in there already
+ if (hitsInLayerInMergedStub.contains(firstHit) && hitsInLayerInMergedStub.contains(secondHit)) {
+ // It all checks out
+ } else {
+ String printme = new String();
+ printme += "Book-keeping error: Trying to add a double-hit in layer "+iLayer+" with cell IDs ";
+ printme += firstHit.getCellID()+" and "+secondHit.getCellID()+"\n";
+ printme += "But merged stub already contains two hits with cell IDs ";
+ for (CalorimeterHit tmpHit : hitsInLayerInMergedStub) {
+ printme += tmpHit.getCellID();
+ printme += " ";
+ }
+ printme += "so I don't know what to do!";
+ Set<CalorimeterHit> tmpSet = new HashSet<CalorimeterHit>();
+ tmpSet.addAll(hitsInLayerInThisStub);
+ tmpSet.addAll(hitsInLayerInMergedStub);
+ printme += "\nDealing with "+tmpSet.size()+" unique hits.";
+ throw new AssertionError(printme);
+ }
+ } else {
+ throw new AssertionError("Book-keeping error");
+ }
+ } else {
+ // One hit...
+ if (hitsInLayerInMergedStub==null) {
+ // No hits yet in the layer -- need to add
+ mergedStub.addNonEdgeHit(firstHit);
+ } else if (hitsInLayerInMergedStub.contains(firstHit)) {
+ // Already in there -- don't need to re-add
+ } else {
+ // Already have a different hit -- add this one as a double-hit
+ CalorimeterHit existingHit = hitsInLayerInMergedStub.iterator().next();
+ mergedStub.addDoubleHit(existingHit, firstHit);
+ }
+ }
+ }
+ }
+ }
+ return mergedStub;
+ }
+
+
+ protected void findIsolatedAndSemiIsolatedHits(Map<Long,CalorimeterHit> hitsToProcess, Map<Long,CalorimeterHit> allHits, Map<Integer,List<CalorimeterHit>> isolatedHits, Map<Integer,List<CalorimeterHit>> semiIsolatedHits, Map<CalorimeterHit,CalorimeterHit> doubleHits) {
+ for (Map.Entry<Long,CalorimeterHit> hitPair : hitsToProcess.entrySet()) {
+ Long hitID = hitPair.getKey();
+ CalorimeterHit hit = hitPair.getValue();
+ IDDecoder id = hit.getIDDecoder();
+ id.setID(hitID);
+ long[] neighbours = id.getNeighbourIDs(0, 1, 1);
+ int countFoundNeighbours = 0;
+ CalorimeterHit lastNeighbourHit = null;
+ for (long neighbour : neighbours) {
+ if (allHits.keySet().contains(neighbour)) {
+ countFoundNeighbours++;
+ lastNeighbourHit = allHits.get(neighbour);
+ }
+ }
+ int layer = getLayer(hit);
+ if (countFoundNeighbours == 0) {
+ List<CalorimeterHit> isolatedHitsInThisLayer = isolatedHits.get(layer);
+ if (isolatedHitsInThisLayer == null) { isolatedHitsInThisLayer = new Vector<CalorimeterHit>(); isolatedHits.put(layer, isolatedHitsInThisLayer); }
+ isolatedHitsInThisLayer.add(hit);
+ } else if (countFoundNeighbours == 1) {
+ List<CalorimeterHit> semiIsolatedHitsInThisLayer = semiIsolatedHits.get(layer);
+ if (semiIsolatedHitsInThisLayer == null) { semiIsolatedHitsInThisLayer = new Vector<CalorimeterHit>(); semiIsolatedHits.put(layer, semiIsolatedHitsInThisLayer); }
+ semiIsolatedHitsInThisLayer.add(hit);
+ doubleHits.put(hit, lastNeighbourHit);
+ if (!allHits.values().contains(lastNeighbourHit)) {
+ throw new AssertionError("Book-keeping error");
+ }
+ }
+ }
+ return;
+ }
+
+
/**
 * Build seed stubs from pairs of nearby isolated/semi-isolated hits.
 * <p>
 * Hit 1 must be isolated; hit 2 may be isolated (next layer only) or
 * semi-isolated (layer +/-1). If hit 2's double-hit partner is also a
 * semi-isolated candidate and within range, the partner is attached to the
 * stub as a double-hit (once only -- duplicates are suppressed via
 * {@code stubsWithDoubleHit}).
 *
 * @param isolatedHits       layer -> isolated hits
 * @param semiIsolatedHits   layer -> semi-isolated hits
 * @param doubleHitMap       semi-isolated hit -> its single neighbour
 * @param allowedSeparation3D maximum 3D distance between paired hits
 * @return list of two-layer seed stubs
 */
protected List<Stub> findTwoLayerStubs(Map<Integer,List<CalorimeterHit>> isolatedHits, Map<Integer,List<CalorimeterHit>> semiIsolatedHits, Map<CalorimeterHit,CalorimeterHit> doubleHitMap, double allowedSeparation3D)
{
    // Hit 1 must be isolated
    // Hit 2 can be isolated or semi-isolated
    // Can have two hits in second layer ONLY if they form a valid doubleHit.
    // To avoid double-counting, don't use combinations where
    // * Hit 1 & hit 2 are both isolated
    // * Layer of hit 2 < layer of hit 1
    List<Stub> outputStubs = new Vector<Stub>();
    for (Integer seedLayer : isolatedHits.keySet()) {
        List<CalorimeterHit> candidatesForHit1 = isolatedHits.get(seedLayer);
        if (candidatesForHit1 != null) {
            for (CalorimeterHit hit1 : candidatesForHit1) {
                Hep3Vector hit1Pos = new BasicHep3Vector(hit1.getPosition());
                // Try isolated hits in next layer (+1 only to avoid ambiguity)
                List<CalorimeterHit> isolatedHitsInNextLayer = isolatedHits.get(seedLayer+1);
                if (isolatedHitsInNextLayer != null) {
                    for (CalorimeterHit hit2 : isolatedHitsInNextLayer) {
                        // Check separation
                        Hep3Vector hit2Pos = new BasicHep3Vector(hit2.getPosition());
                        double distance = VecOp.sub(hit1Pos, hit2Pos).magnitude();
                        if (distance < allowedSeparation3D) {
                            // Found 2-hit stub
                            Stub stub = new Stub();
                            stub.addHit(hit1);
                            stub.addHit(hit2);
                            outputStubs.add(stub);
                        }
                    }
                }
                // Try semi-isolated hits in layer +1 or -1
                List<CalorimeterHit> semiIsolatedHitCandidates = new Vector<CalorimeterHit>();
                List<CalorimeterHit> semiIsolatedHitCandidatesInLayerPlusOne = semiIsolatedHits.get(seedLayer+1);
                List<CalorimeterHit> semiIsolatedHitCandidatesInLayerMinusOne = semiIsolatedHits.get(seedLayer-1);
                if (semiIsolatedHitCandidatesInLayerPlusOne != null) { semiIsolatedHitCandidates.addAll(semiIsolatedHitCandidatesInLayerPlusOne); }
                if (semiIsolatedHitCandidatesInLayerMinusOne != null) { semiIsolatedHitCandidates.addAll(semiIsolatedHitCandidatesInLayerMinusOne); }
                List<Stub> stubsWithDoubleHit = new Vector<Stub>(); // We'll use this to make sure we don't double-count
                for (CalorimeterHit hit2 : semiIsolatedHitCandidates) {
                    // Check separation
                    Hep3Vector hit2Pos = new BasicHep3Vector(hit2.getPosition());
                    double distance = VecOp.sub(hit1Pos, hit2Pos).magnitude();
                    if (distance < allowedSeparation3D) {
                        // Found 2-hit stub
                        Stub stub = new Stub();
                        stub.addHit(hit1);
                        stub.addHit(hit2);
                        // Check for special case of double-hit where both are valid.
                        // BE CAREFUL! If neighbour isn't semi-isolated (i.e. has extra neighbours), it will silently
                        // break code if you include it!
                        CalorimeterHit partnerDoubleHit = doubleHitMap.get(hit2);
                        if (partnerDoubleHit != null && semiIsolatedHitCandidates.contains(partnerDoubleHit)) {
                            // Check separation
                            Hep3Vector partnerHitPos = new BasicHep3Vector(partnerDoubleHit.getPosition());
                            double distanceToPartner = VecOp.sub(hit1Pos, partnerHitPos).magnitude();
                            if (distanceToPartner < allowedSeparation3D) {
                                // OK, this looks valid. Check that we haven't added it already:
                                // (the same pair is seen twice -- once with hit2 and once with
                                // its partner as the loop variable)
                                boolean alreadyUsed = false;
                                for (Stub existingStub : stubsWithDoubleHit) {
                                    if (existingStub.contains(hit1) && existingStub.contains(hit2) && existingStub.contains(partnerDoubleHit)) {
                                        // Yep, we already did this one. Skip it.
                                        alreadyUsed = true;
                                        break;
                                    }
                                }
                                if (alreadyUsed) {
                                    // Don't double-count this one; instead move on to next hit candidate
                                    // (note: the 2-hit stub built above is deliberately dropped too)
                                    continue;
                                } else {
                                    // OK, need to add it since we haven't yet:
                                    stub.addDoubleHit(hit2, partnerDoubleHit);
                                    stubsWithDoubleHit.add(stub);
                                }
                            }
                        }
                        outputStubs.add(stub);
                    }
                }
            }
        }
    }

    return outputStubs;
}
+
+ protected List<Stub> expandStubSimple(Stub stub, int layerStep, Map<Integer,List<CalorimeterHit>> isolatedHits, Map<Integer,List<CalorimeterHit>> semiIsolatedHits, Map<CalorimeterHit,CalorimeterHit> doubleHitMap, double allowedSeparation3D, double minimumDotProduct)
+ {
+ List<Stub> outputStubs = new Vector<Stub>();
+
+ // What layer should we look at?
+ int nearbyLayer;
+ Hep3Vector nearbyDirection = null;
+ if (layerStep > 0) {
+ nearbyLayer = stub.getOuterLayer();
+ nearbyDirection = VecOp.unit(VecOp.sub(stub.getOuterHitPosition(), stub.getInnerHitPosition()));
+ } else if (layerStep < 0) {
+ nearbyLayer = stub.getInnerLayer();
+ nearbyDirection = VecOp.unit(VecOp.sub(stub.getInnerHitPosition(), stub.getOuterHitPosition()));
+ } else {
+ throw new AssertionError("Invalid layerStep="+layerStep);
+ }
+ int targetLayer = nearbyLayer + layerStep;
+ Hep3Vector nearbyPosition = stub.getHitPosition(nearbyLayer);
+
+ // Find hits of interest in that layer. Start with isolated hits:
+ List<CalorimeterHit> isolatedHitsInTargetLayer = isolatedHits.get(targetLayer);
+ if (isolatedHitsInTargetLayer != null) {
+ for (CalorimeterHit hit : isolatedHitsInTargetLayer) {
+ Hep3Vector hitPos = new BasicHep3Vector(hit.getPosition());
+ Hep3Vector displacement = VecOp.sub(hitPos, nearbyPosition);
+ double distance = displacement.magnitude();
+ if (distance <= allowedSeparation3D) {
+ double dotProduct = VecOp.dot(VecOp.unit(displacement), nearbyDirection);
+ if (dotProduct >= minimumDotProduct) {
+ // Match
+ Stub newStub = new Stub(stub);
+ newStub.addHit(hit);
+ outputStubs.add(newStub);
+ }
+ }
+ }
+ }
+ // Now look at semi-isolated hits. Be careful not to double-count double-hits
+ List<CalorimeterHit> semiIsolatedHitsInTargetLayer = semiIsolatedHits.get(targetLayer);
+ if (semiIsolatedHitsInTargetLayer != null) {
+ Set<CalorimeterHit> alreadyUsedDoubleHits = new HashSet<CalorimeterHit>();
+ for (CalorimeterHit hit : semiIsolatedHitsInTargetLayer) {
+ CalorimeterHit pairedDoubleHit = doubleHitMap.get(hit);
+ if (alreadyUsedDoubleHits.contains(hit)) {
+ // Already done this one!
+ if (!alreadyUsedDoubleHits.contains(pairedDoubleHit)) { throw new AssertionError("Book-keeping error"); }
+ continue;
+ }
+ boolean matchesAsSingleHit = false;
+ boolean matchesAsDoubleHit = false;
+ Hep3Vector singleHitPos = new BasicHep3Vector(hit.getPosition());
+ Hep3Vector singleHitDisplacement = VecOp.sub(singleHitPos, nearbyPosition);
+ double singleHitDistance = singleHitDisplacement.magnitude();
+ // See if it checks out as a single hit
+ if (singleHitDistance <= allowedSeparation3D) {
+ double singleHitDotProduct = VecOp.dot(VecOp.unit(singleHitDisplacement), nearbyDirection);
+ if (singleHitDotProduct >= minimumDotProduct) {
+ matchesAsSingleHit = true;
+ }
+ }
+ if (pairedDoubleHit != null && semiIsolatedHitsInTargetLayer.contains(pairedDoubleHit)) {
+ // Possible double-hit... see if it checks out for distance & dot-product
+ Hep3Vector secondHitPos = new BasicHep3Vector(pairedDoubleHit.getPosition());
+ Hep3Vector doubleHitPos = VecOp.mult(0.5, VecOp.add(singleHitPos, secondHitPos));
+ Hep3Vector doubleHitDisplacement = VecOp.sub(doubleHitPos, nearbyPosition);
+ double doubleHitDistance = doubleHitDisplacement.magnitude();
+ if (doubleHitDistance <= allowedSeparation3D) {
+ double doubleHitDotProduct = VecOp.dot(VecOp.unit(doubleHitDisplacement), nearbyDirection);
+ if (doubleHitDotProduct >= minimumDotProduct) {
+ matchesAsDoubleHit = true;
+ }
+ }
+ }
+ // Treat preferentially as a double-hit; otherwise as single.
+ if (matchesAsDoubleHit) {
+ Stub newStub = new Stub(stub);
+ newStub.addHit(hit);
+ newStub.addDoubleHit(hit, pairedDoubleHit);
+ outputStubs.add(newStub);
+ alreadyUsedDoubleHits.add(hit);
+ alreadyUsedDoubleHits.add(pairedDoubleHit);
+ } else if (matchesAsSingleHit) {
[truncated at 1000 lines; 452 more skipped]