Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
TestNewMipFinder.java  +2975  added 1.1
MJC: (contrib) Another MIP-finder

lcsim/src/org/lcsim/contrib/uiowa
TestNewMipFinder.java added at 1.1
diff -N TestNewMipFinder.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TestNewMipFinder.java	8 Mar 2008 20:55:53 -0000	1.1
@@ -0,0 +1,2975 @@
+package org.lcsim.contrib.uiowa;
+
+import hep.physics.vec.*;
+import java.util.*;
+
+import org.lcsim.util.Driver;
+import org.lcsim.contrib.uiowa.NonTrivialPFA;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.loop.LCIODriver;
+import org.lcsim.util.*;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.util.hitmap.*;
+import org.lcsim.event.*;
+import org.lcsim.recon.cluster.structural.likelihood.LikelihoodEvaluatorWrapper;
+import org.lcsim.digisim.*;
+import org.lcsim.recon.cluster.mipfinder.*;
+import org.lcsim.geometry.*;
+
+public class TestNewMipFinder extends Driver implements Clusterer
+{
+    protected boolean m_debug = false;
+    protected double m_separation3D;
+    public TestNewMipFinder(double separation3D) {
+	m_separation3D = separation3D;
+    }
+
+    public TestNewMipFinder() {
+	// Run DigiSim
+	// -----------
+
+	// CalHitMapDriver is needed by DigiSim
+	add(new CalHitMapDriver());
+	// DigiSim: SimCalHits -> RawCalHits
+	DigiSimDriver digi = new org.lcsim.digisim.DigiSimDriver();
+	add(digi);
+	// RawCalHits -> SimCalorimeterHits
+	add( new SimCalorimeterHitsDriver() );
+
+	// Set up input hit lists
+	// ----------------------
+
+	String eventHitMapEcal = "inputHitMapEcal";
+	String eventHitMapHcal = "inputHitMapHcal";
+	HitListToHitMapDriver hitmapEcal = new HitListToHitMapDriver();
+	hitmapEcal.addInputList("EcalBarrDigiHits");
+	hitmapEcal.addInputList("EcalEndcapDigiHits");
+	hitmapEcal.setOutput(eventHitMapEcal);
+	HitListToHitMapDriver hitmapHcal = new HitListToHitMapDriver();
+	hitmapHcal.addInputList("HcalBarrDigiHits");
+	hitmapHcal.addInputList("HcalEndcapDigiHits");
+	hitmapHcal.setOutput(eventHitMapHcal);
+	add(hitmapEcal);
+	add(hitmapHcal);
+    }
+
+    public void process(EventHeader event) {
+	super.process(event);
+	HitMap hitMapEcal = ((HitMap)(event.get("inputHitMapEcal")));
+	HitMap hitMapHcal = ((HitMap)(event.get("inputHitMapHcal")));
+	Map<MCParticle,List<CalorimeterHit>> hitsPerParticleEcal = new HashMap<MCParticle,List<CalorimeterHit>>();
+	Map<MCParticle,List<CalorimeterHit>> hitsPerParticleHcal = new HashMap<MCParticle,List<CalorimeterHit>>();
+	fillMap(hitMapEcal, hitsPerParticleEcal);
+	fillMap(hitMapHcal, hitsPerParticleHcal);
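+	// fillMap groups calorimeter hits by the MCParticle they originate from; below,
+	// only hits from particles passing accept() are copied into the "accepted" hit
+	// maps, so the MIP finder is run on an MC-truth-selected subset of the event.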
+
+	HitMap acceptedHitsEcal = new HitMap();
+	HitMap acceptedHitsHcal = new HitMap();
+	for (MCParticle part : hitsPerParticleEcal.keySet()) {
+	    if (accept(part)) {
+		//System.out.println("  "+part.getPDGID()+", p="+part.getMomentum().magnitude()+", "+hitsPerParticleEcal.get(part).size()+" ECAL hits");
+		for (CalorimeterHit hit : hitsPerParticleEcal.get(part)) {
+		    acceptedHitsEcal.put(hit.getCellID(), hit);
+		}
+	    }
+	}
+	for (MCParticle part : hitsPerParticleHcal.keySet()) {
+	    if (accept(part)) {
+		//System.out.println("  "+part.getPDGID()+", p="+part.getMomentum().magnitude()+", "+hitsPerParticleHcal.get(part).size()+" HCAL hits");
+		for (CalorimeterHit hit : hitsPerParticleHcal.get(part)) {
+		    acceptedHitsHcal.put(hit.getCellID(), hit);
+		}
+	    }
+	}
+
+	String inputHitMapEcal = "tmpHitMapEcal";
+	String inputHitMapHcal = "tmpHitMapHcal";
+	event.put(inputHitMapEcal, acceptedHitsEcal);
+	event.put(inputHitMapHcal, acceptedHitsHcal);
+
+	System.out.println("ECAL: Accepted "+acceptedHitsEcal.size()+" / "+hitMapEcal.size());
+	System.out.println("HCAL: Accepted "+acceptedHitsHcal.size()+" / "+hitMapHcal.size());
+
+	List<Cluster> foundClustersECAL = subProcess2(acceptedHitsEcal, hitMapEcal, 20.0);
+	List<Cluster> foundClustersHCAL = subProcess2(acceptedHitsHcal, hitMapHcal, 50.0);
+    }
+
+    public List<Cluster> createClusters(List<CalorimeterHit> hits) {
+	HitMap map = new HitMap();
+	for (CalorimeterHit hit : hits) {
+	    map.put(hit.getCellID(), hit);
+	}
+	return subProcess2(map, map, m_separation3D);
+    }	
+    public List<Cluster> createClusters(Map<Long,CalorimeterHit> map) {
+	return subProcess2(map, map, m_separation3D);
+    }
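+    // Illustrative usage sketch (not from the original commit): when used as a Clusterer
+    // rather than as a standalone Driver, the finder might be invoked roughly as
+    //   TestNewMipFinder mipFinder = new TestNewMipFinder(50.0);   // 3D separation cut
+    //   List<Cluster> mipClusters = mipFinder.createClusters(hcalHits);
+    // where 50.0 is the HCAL separation value used in process() above and hcalHits is a
+    // hypothetical List<CalorimeterHit> prepared by the caller.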
+
+
+    protected List<Cluster> subProcess2(Map<Long,CalorimeterHit> inputAcceptedHits, Map<Long,CalorimeterHit> inputHits, double allowedSeparation3D)
+    {
+	if (m_debug) {
+	    int count = 0;
+	    for (CalorimeterHit hit : inputAcceptedHits.values()) {
+		if (inputHits.values().contains(hit)) { count++; }
+	    }
+	    System.out.println("DEBUG: Of the "+inputAcceptedHits.size()+" inputAcceptedHits, "+count+" are in inputHits.");
+	    count = 0;
+	    for (CalorimeterHit hit : inputHits.values()) {
+		if (inputAcceptedHits.values().contains(hit)) { count++; }
+	    }
+	    System.out.println("DEBUG: Of the "+inputHits.size()+" inputHits, "+count+" are in inputAcceptedHits.");
+	}
+
+	Map<Integer, List<CalorimeterHit>> isolatedHitsHcal = new HashMap<Integer, List<CalorimeterHit>>();
+	Map<Integer, List<CalorimeterHit>> semiIsolatedHitsHcal = new HashMap<Integer, List<CalorimeterHit>>();
+	Map<CalorimeterHit,CalorimeterHit> doubleHitMapHcal = new HashMap<CalorimeterHit,CalorimeterHit>();
+	long timeBeforeFindIsolatedAndSemiIsolatedHits = Calendar.getInstance().getTimeInMillis();
+	findIsolatedAndSemiIsolatedHits(inputAcceptedHits, inputHits, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal);
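+	// (In the maps filled above, isolatedHitsHcal and semiIsolatedHitsHcal are keyed by
+	// layer number; doubleHitMapHcal pairs each hit of a same-layer "double hit" with its
+	// partner, which the consistency checks below rely on.)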
+
+	List<Stub> twoLayerStubs = findTwoLayerStubs(isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D);
+	if (m_debug) { System.out.println("DEBUG: Found "+twoLayerStubs.size()+" two-layer stubs."); }
+	long timeAfterFindTwoLayerStubs = Calendar.getInstance().getTimeInMillis();
+
+	for (Stub stub : twoLayerStubs) {
+	    for (int layer=stub.getInnerLayer(); layer<=stub.getOuterLayer(); layer++) {
+		Collection<CalorimeterHit> hitsInLayer = stub.getHitsInLayer(layer);
+		if (hitsInLayer != null) {
+		    if (hitsInLayer.size()>1) {
+			if (hitsInLayer.size() > 2) { throw new AssertionError("Book-keeping error"); }
+			Iterator<CalorimeterHit> iter = hitsInLayer.iterator();
+			CalorimeterHit hit1 = iter.next();
+			CalorimeterHit hit2 = iter.next();
+			if (getLayer(hit1) != layer) { throw new AssertionError("Book-keeping error"); }
+			if (getLayer(hit2) != layer) { throw new AssertionError("Book-keeping error"); }
+			List<CalorimeterHit> isolatedHitsInLayer = isolatedHitsHcal.get(layer);
+			List<CalorimeterHit> semiIsolatedHitsInLayer = semiIsolatedHitsHcal.get(layer);
+			if (isolatedHitsInLayer != null && isolatedHitsInLayer.contains(hit1)) { throw new AssertionError("2 hits in layer "+layer+", but hit is marked as isolated. Stub contains "+stub.getLayersCount()+" layers."); }
+			if (isolatedHitsInLayer != null && isolatedHitsInLayer.contains(hit2)) { throw new AssertionError("2 hits in layer "+layer+", but hit is marked as isolated. Stub contains "+stub.getLayersCount()+" layers."); }
+			if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit1))) { throw new AssertionError("2 hits in layer "+layer+", but hit is not marked as semi-isolated. Stub contains "+stub.getLayersCount()+" layers."); }
+			if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit2))) { throw new AssertionError("2 hits in layer "+layer+", but hit is not marked as semi-isolated. Stub contains "+stub.getLayersCount()+" layers."); }
+			CalorimeterHit doubleHitOfHit1 = doubleHitMapHcal.get(hit1);
+			CalorimeterHit doubleHitOfHit2 = doubleHitMapHcal.get(hit2);
+			if (doubleHitOfHit1 != hit2) { throw new AssertionError("2 hits in layer, but DoubleHit(hit1) != hit2"); }
+			if (doubleHitOfHit2 != hit1) { throw new AssertionError("2 hits in layer, but DoubleHit(hit2) != hit1"); }
+		    }
+		}
+	    }
+	}
+
+	double minimumDotProduct = 0.95;
+	List<Stub> multiLayerStubs = new Vector<Stub>();
+	Set<Stub> usedTwoLayerStubs = new HashSet<Stub>();
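+	// Grow each two-layer stub into a multi-layer stub: repeatedly try to extend it by
+	// one layer outward, then inward; only if neither direction gives a single unambiguous
+	// candidate, consider skipping one layer, which requires hits in the three outermost
+	// (or innermost) layers and an unambiguous two-layer continuation beyond the gap.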
+	for (Stub twoLayerStub : twoLayerStubs) {
+	    if (usedTwoLayerStubs.contains(twoLayerStub)) {
+		// Already included this one
+		continue;
+	    }
+	    Stub currentStub = twoLayerStub;
+	    usedTwoLayerStubs.add(twoLayerStub);
+	    //System.out.println("DEBUG: About to start expanding a 2-layer stub:");
+	    while(true) {
+		//System.out.println("DEBUG: Will try to expand a "+currentStub.getLayersCount()+"-layer stub covering layers "+currentStub.getInnerLayer()+"-"+currentStub.getOuterLayer());
+		List<Stub> stubCandidatesExpandingOutward = expandStubSimple(currentStub, +1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+		//System.out.println("DEBUG: Tried to expand a "+currentStub.getLayersCount()+"-layer stub outward; made "+stubCandidatesExpandingOutward.size()+" candidates.");
+		if (stubCandidatesExpandingOutward.size() == 1) {
+		    currentStub = stubCandidatesExpandingOutward.get(0);
+		    //System.out.println("DEBUG: Expanded stub outward. New stub is a "+currentStub.getLayersCount()+"-layer stub covering layers "+currentStub.getInnerLayer()+"-"+currentStub.getOuterLayer());
+		    continue;
+		}
+		List<Stub> stubCandidatesExpandingInward = expandStubSimple(currentStub, -1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+		//System.out.println("DEBUG: Tried to expand a "+currentStub.getLayersCount()+"-layer stub inward; made "+stubCandidatesExpandingInward.size()+" candidates.");
+		if (stubCandidatesExpandingInward.size() == 1) {
+		    currentStub = stubCandidatesExpandingInward.get(0);
+		    //System.out.println("DEBUG: Expanded stub inward. New stub is a "+currentStub.getLayersCount()+"-layer stub covering layers "+currentStub.getInnerLayer()+"-"+currentStub.getOuterLayer());
+		    continue;
+		}
+		// Wasn't able to find an unambiguous expansion for +- 1 layer.
+		// Consider skipping a layer, provided that
+		//   * we had hits in the last 3 layers
+		//   * we find hits in 2+ layers after the gap
+		int outerLayer = currentStub.getOuterLayer();
+		boolean hitInOuterLayer = (currentStub.getHitsInLayer(outerLayer) != null && currentStub.getHitsInLayer(outerLayer).size()>0);
+		boolean hitInOuterLayerMinusOne = (currentStub.getHitsInLayer(outerLayer-1) != null && currentStub.getHitsInLayer(outerLayer-1).size()>0);
+		boolean hitInOuterLayerMinusTwo = (currentStub.getHitsInLayer(outerLayer-2) != null && currentStub.getHitsInLayer(outerLayer-2).size()>0);
+		if (!hitInOuterLayer) { throw new AssertionError("Book-keeping error"); }
+		if (hitInOuterLayer && hitInOuterLayerMinusOne && hitInOuterLayerMinusTwo) {
+		    // Hits in last 3 layers... consider skipping one
+		    // Watch out -- is the 3D separation still sensible? -- FIXME
+		    List<Stub> stubCandidatesExpandingOutwardTwoLayers = expandStubSimple(currentStub, +2, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+		    Stub candidateForNewStub = null;
+		    if (stubCandidatesExpandingOutwardTwoLayers.size() == 1) {
+			candidateForNewStub = stubCandidatesExpandingOutwardTwoLayers.get(0);
+		    }
+		    if (candidateForNewStub != null) {
+			// Require another layer in the same direction
+			List<Stub> stubCandidatesExpandingOutwardAnotherLayer = expandStubSimple(candidateForNewStub, +1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+			if (stubCandidatesExpandingOutwardAnotherLayer.size() == 1) {
+			    // Yes, this is valid
+			    currentStub = stubCandidatesExpandingOutwardAnotherLayer.get(0);
+			    continue;
+			}
+		    }
+		}
+		int innerLayer = currentStub.getInnerLayer();
+		boolean hitInInnerLayer = (currentStub.getHitsInLayer(innerLayer) != null && currentStub.getHitsInLayer(innerLayer).size()>0);
+		boolean hitInInnerLayerPlusOne = (currentStub.getHitsInLayer(innerLayer+1) != null && currentStub.getHitsInLayer(innerLayer+1).size()>0);
+		boolean hitInInnerLayerPlusTwo = (currentStub.getHitsInLayer(innerLayer+2) != null && currentStub.getHitsInLayer(innerLayer+2).size()>0);
+		if (!hitInInnerLayer) { throw new AssertionError("Book-keeping error"); }
+		if (hitInInnerLayer && hitInInnerLayerPlusOne && hitInInnerLayerPlusTwo) {
+		    // Hits in innermost 3 layers... consider skipping one
+		    // Watch out -- is the 3D separation still sensible? -- FIXME
+		    List<Stub> stubCandidatesExpandingInwardTwoLayers = expandStubSimple(currentStub, -2, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+		    Stub candidateForNewStub = null;
+		    if (stubCandidatesExpandingInwardTwoLayers.size() == 1) {
+			candidateForNewStub = stubCandidatesExpandingInwardTwoLayers.get(0);
+		    }
+		    if (candidateForNewStub != null) {
+			// Require another layer in the same direction
+			List<Stub> stubCandidatesExpandingInwardAnotherLayer = expandStubSimple(candidateForNewStub, -1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+			if (stubCandidatesExpandingInwardAnotherLayer.size() == 1) {
+			    // Yes, this is valid
+			    currentStub = stubCandidatesExpandingInwardAnotherLayer.get(0);
+			    continue;
+			}
+		    }
+		}
+		// Wasn't able to find an unambiguous expansion
+		break;
+	    }
+	    if (currentStub.getLayersCount()>2) {
+		// Extended it to a useful size -- keep this one.
+		multiLayerStubs.add(currentStub);
+
+		// Now go back and check off any 2-layer stubs inside it:
+		//System.out.println("For this "+currentStub.getLayersCount()+"-layer stub, will now go back and check for contained 2-layer stubs...");
+		Collection<CalorimeterHit> allHitsInCurrentStub = currentStub.getAllHits();
+		for (Stub testTwoLayerStub : twoLayerStubs) {
+		    if (usedTwoLayerStubs.contains(testTwoLayerStub)) { 
+			// Already flagged
+			continue; 
+		    }
+		    if (testTwoLayerStub.getInnerLayer() >= currentStub.getInnerLayer() && testTwoLayerStub.getOuterLayer() <= currentStub.getOuterLayer()) {
+			Collection<CalorimeterHit> allHitsInTwoLayerStub = testTwoLayerStub.getAllHits();
+			boolean exactMatch = true;
+			for (CalorimeterHit hit : allHitsInTwoLayerStub) {
+			    if (!allHitsInCurrentStub.contains(hit)) {
+				// Not an exact match
+				exactMatch = false;
+				break;
+			    }
+			}
+			if (exactMatch) {
+			    // Flag it
+			    usedTwoLayerStubs.add(testTwoLayerStub);
+			    //System.out.println("DEBUG: Flagged a 2-layer stub. There are now "+usedTwoLayerStubs.size()+" flagged 2-layer stubs.");
+			}
+		    }
+		}
+	    }
+	}
+
+	if (m_debug) { System.out.println("There are now "+multiLayerStubs.size()+" multi-layer stubs found. "+usedTwoLayerStubs.size()+"/"+twoLayerStubs.size()+" two-layer stubs flagged as used."); }
+
+	for (Stub stub : multiLayerStubs) {
+	    for (int layer=stub.getInnerLayer(); layer<=stub.getOuterLayer(); layer++) {
+		Collection<CalorimeterHit> hitsInLayer = stub.getHitsInLayer(layer);
+		if (hitsInLayer != null) {
+		    if (hitsInLayer.size()>1) {
+			if (hitsInLayer.size() > 2) { throw new AssertionError("Book-keeping error"); }
+			Iterator<CalorimeterHit> iter = hitsInLayer.iterator();
+			CalorimeterHit hit1 = iter.next();
+			CalorimeterHit hit2 = iter.next();
+			List<CalorimeterHit> isolatedHitsInLayer = isolatedHitsHcal.get(layer);
+			List<CalorimeterHit> semiIsolatedHitsInLayer = semiIsolatedHitsHcal.get(layer);
+			if (isolatedHitsInLayer!=null && isolatedHitsInLayer.contains(hit1)) { throw new AssertionError("2 hits in layer, but hit is marked as isolated."); }
+			if (isolatedHitsInLayer!=null && isolatedHitsInLayer.contains(hit2)) { throw new AssertionError("2 hits in layer, but hit is marked as isolated."); }
+			if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit1))) { throw new AssertionError("2 hits in layer, but hit is not marked as semi-isolated."); }
+			if (semiIsolatedHitsInLayer==null || !(semiIsolatedHitsInLayer.contains(hit2))) { throw new AssertionError("2 hits in layer, but hit is not marked as semi-isolated."); }
+			CalorimeterHit doubleHitOfHit1 = doubleHitMapHcal.get(hit1);
+			CalorimeterHit doubleHitOfHit2 = doubleHitMapHcal.get(hit2);
+			if (doubleHitOfHit1 != hit2) { throw new AssertionError("2 hits in layer, but DoubleHit(hit1) != hit2"); }
+			if (doubleHitOfHit2 != hit1) { throw new AssertionError("2 hits in layer, but DoubleHit(hit2) != hit1"); }
+		    }
+		}
+	    }
+	}
+
+	// Look for identical stubs
+	Set<Integer> identicalStubsToPurge = new HashSet<Integer>();
+	for (int i=0; i<multiLayerStubs.size(); i++) {
+	    Stub stub1 = multiLayerStubs.get(i);
+	    for (int j=i+1; j<multiLayerStubs.size(); j++) {
+		Stub stub2 = multiLayerStubs.get(j);
+		if (stub1.equals(stub2)) {
+		    identicalStubsToPurge.add(j);
+		    if (m_debug) { System.out.println("Flagged ["+j+"] for removal, since it exactly matches ["+i+"]"); }
+		}
+	    }
+	}
+	List<Stub> nonIdenticalMultiLayerStubs = new Vector<Stub>();
+	for (int i=0; i<multiLayerStubs.size(); i++) {
+	    if (! identicalStubsToPurge.contains(i) ) {
+		Stub stub = multiLayerStubs.get(i);
+		nonIdenticalMultiLayerStubs.add(stub);
+	    } else {
+		if (m_debug) { System.out.println("Removed ["+i+"]"); }
+	    }
+	}
+
+	if (m_debug) { System.out.println("After removing identical stubs, there are "+nonIdenticalMultiLayerStubs.size()+" multi-layer stubs left."); }
+
+	// Look for near-identical stubs
+	List<Stub> uniqueMultiLayerStubs = new Vector<Stub>();
+	{
+	    Map<Stub,Set<Stub>> mapInputToOutputStubs = new HashMap<Stub,Set<Stub>>();
+	    for (Stub stub : nonIdenticalMultiLayerStubs) { 
+		Set<Stub> tmpList = new HashSet<Stub>();
+		tmpList.add(stub);
+		mapInputToOutputStubs.put(stub,tmpList); 
+	    }
+	    for (int i=0; i<nonIdenticalMultiLayerStubs.size(); i++) {
+		Stub stub1 = nonIdenticalMultiLayerStubs.get(i);
+		for (int j=i+1; j<nonIdenticalMultiLayerStubs.size(); j++) {
+		    Stub stub2 = nonIdenticalMultiLayerStubs.get(j);
+		    if (stub1.equalsAllowingForSkippedLayersAndDoubleHits(stub2)) {
+			// Need to merge them.
+			Set<Stub> stub1Target = mapInputToOutputStubs.get(stub1);
+			Set<Stub> stub2Target = mapInputToOutputStubs.get(stub2);
+			Set<Stub> mergedTarget = new HashSet<Stub>();
+			mergedTarget.addAll(stub1Target);
+			mergedTarget.addAll(stub2Target);
+			for (Stub targetStub : mergedTarget) {
+			    mapInputToOutputStubs.put(targetStub, mergedTarget);
+			}
+		    }
+		}
+	    }
+	    for (Stub stub : mapInputToOutputStubs.keySet()) {
+		Set<Stub> targets = mapInputToOutputStubs.get(stub);
+		if (!targets.contains(stub)) { throw new AssertionError("Book-keeping error"); }
+	    }
+	    for (Set<Stub> targets : mapInputToOutputStubs.values()) {
+		for (Stub target : targets) {
+		    Set<Stub> targetOfTarget = mapInputToOutputStubs.get(target);
+		    if (targetOfTarget != targets) { throw new AssertionError("Book-keeping error"); }
+		}
+	    }
+	    Set<Stub> alreadyCountedStubs = new HashSet<Stub>();
+	    for (Stub stub : mapInputToOutputStubs.keySet()) {
+		if (alreadyCountedStubs.contains(stub)) {
+		    continue;
+		} else {
+		    Set<Stub> targets = mapInputToOutputStubs.get(stub);
+		    if (targets.size() < 1) {
+			throw new AssertionError("Book-keeping error"); 
+		    } else if (targets.size() == 1) {
+			uniqueMultiLayerStubs.addAll(targets);
+		    } else {
+			Stub mergedStub = mergeStubs(targets);
+			uniqueMultiLayerStubs.add(mergedStub);
+		    }
+		    alreadyCountedStubs.addAll(targets);
+		}
+	    }
+	}
+
+	if (m_debug) { System.out.println("After removing near-identical stubs, there are "+uniqueMultiLayerStubs.size()+" multi-layer stubs left."); }
+
+	// Check for subsets
+	// If Stub1 is an exact subset of Stub2, we'll simply discard Stub1.
+	// Note that this is trickier if it's not an exact subset (e.g. single
+	// vs double hit), but we don't cover that case here.
+	List<Stub> uniqueMultiLayerStubsNoSubsets = new Vector<Stub>();
+	{
+	    Set<Integer> stubsToDiscard = new HashSet<Integer>();
+	    for (int i=0; i<uniqueMultiLayerStubs.size(); i++) {
+		Stub stub1 = uniqueMultiLayerStubs.get(i);
+		for (int j=i+1; j<uniqueMultiLayerStubs.size(); j++) {
+		    Stub stub2 = uniqueMultiLayerStubs.get(j);
+		    boolean stub1IsSubsetOfStub2 = stub1.isSubsetOfOrEquals(stub2);
+		    boolean stub2IsSubsetOfStub1 = stub2.isSubsetOfOrEquals(stub1);
+		    if (stub2IsSubsetOfStub1) {
+			if (m_debug) { System.out.println("DEBUG: Stub2 [j="+j+"] covering layers "+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()+" is a subset of Stub1[i="+i+"] covering layers "+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()); }
+			stubsToDiscard.add(j);
+		    }
+		    if (stub1IsSubsetOfStub2 && !stub2IsSubsetOfStub1) {
+			if (m_debug) { System.out.println("DEBUG: Stub1 [i="+i+"] covering layers "+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+" is a subset of Stub2[j="+j+"] covering layers "+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()); }
+			stubsToDiscard.add(i);
+		    }
+		}
+	    }
+	    for (int i=0; i<uniqueMultiLayerStubs.size(); i++) {
+		if (!stubsToDiscard.contains(i)) {
+		    Stub stub = uniqueMultiLayerStubs.get(i);
+		    uniqueMultiLayerStubsNoSubsets.add(stub);
+		}
+	    }
+	}
+
+	if (m_debug) { System.out.println("After removing subset stubs, there are "+uniqueMultiLayerStubsNoSubsets.size()+" multi-layer stubs left."); }
+
+	// Now check for near-subsets (allowing for double hits)
+	List<Stub> uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits = new Vector<Stub>();
+	{
+	    Map<Integer, Set<Integer>> mergeTargets = new HashMap<Integer, Set<Integer>>();
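+	    // mergeTargets groups stub indices that should be merged: every index in a group
+	    // is mapped to the same Set holding all indices of that group.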
+	    for (int i=0; i<uniqueMultiLayerStubsNoSubsets.size(); i++) {
+		Stub stub1 = uniqueMultiLayerStubsNoSubsets.get(i);
+		for (int j=i+1; j<uniqueMultiLayerStubsNoSubsets.size(); j++) {
+		    Stub stub2 = uniqueMultiLayerStubsNoSubsets.get(j);
+		    boolean stub1IsSubsetOfStub2 = stub1.isSubsetOfOrEqualsAllowingForDoubleHits(stub2);
+		    boolean stub2IsSubsetOfStub1 = stub2.isSubsetOfOrEqualsAllowingForDoubleHits(stub1);
+		    if (stub2IsSubsetOfStub1 || stub1IsSubsetOfStub2) {
+			// mergeTargets is keyed by stub index, so look up by index rather than by Stub
+			Set<Integer> targetsOfStub1 = mergeTargets.get(i);
+			Set<Integer> targetsOfStub2 = mergeTargets.get(j);
+			Set<Integer> combinedTargets = new HashSet<Integer>();
+			if (targetsOfStub1 != null) {
+			    combinedTargets.addAll(targetsOfStub1);
+			}
+			if (targetsOfStub2 != null) {
+			    combinedTargets.addAll(targetsOfStub2);
+			}
+			combinedTargets.add(i);
+			combinedTargets.add(j);
+			for (Integer k : combinedTargets) {
+			    mergeTargets.put(k, combinedTargets);
+			}
+		    }
+		}
+	    }
+	    // First handle the easy cases:
+	    Set<Integer> mergeTargetsKeySet = mergeTargets.keySet();
+	    for (int i=0; i<uniqueMultiLayerStubsNoSubsets.size(); i++) {
+		Stub stub = uniqueMultiLayerStubsNoSubsets.get(i);
+		if ( ! mergeTargetsKeySet.contains(i) ) {
+		    // This one wasn't attached to anything else -- just add it
+		    uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.add(stub);
+		}
+	    }
+	    // Now go back and do the ones that need merging, being
+	    // careful not to double-count:
+	    Set<Integer> usedStubs = new HashSet<Integer>();
+	    for (Integer i : mergeTargetsKeySet) {
+		if (usedStubs.contains(i)) { 
+		    continue;
+		}
+		Set<Integer> targets = mergeTargets.get(i);
+		Set<Stub> targetStubs = new HashSet<Stub>();
+		for (Integer j : targets) {
+		    targetStubs.add(uniqueMultiLayerStubsNoSubsets.get(j));
+		}
+		Stub mergedStub = mergeStubs(targetStubs);
+		uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.add(mergedStub);
+		usedStubs.addAll(targets);
+	    }
+	}
+
+	if (m_debug) { System.out.println("After removing subset stubs (allowing for double-hits), there are "+uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.size()+" multi-layer stubs left."); }
+
+	// Look for overlaps...
+	for (int i=0; i<uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.size(); i++) {
+	    Stub stub1 = uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.get(i);
+	    if (stub1.getLayersCount() > 3) {
+		Collection<CalorimeterHit> allHitsInStub1 = stub1.getAllHits();
+		for (int j=0; j<uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.size(); j++) {
+		    if (i == j) { continue; }
+		    Stub stub2 = uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits.get(j);
+		    Collection<CalorimeterHit> allHitsInStub2 = stub2.getAllHits();
+		    boolean overlap = false;
+		    for (CalorimeterHit hit : allHitsInStub2) {
+			if (allHitsInStub1.contains(hit)) {
+			    overlap = true;
+			    break;
+			}
+		    }
+		    if (overlap) {
+			String printme = "Overlap between stub["+i+"] with "+stub1.getLayersCount()+" layers ("+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+") and stub["+j+"] with "+stub2.getLayersCount()+" layers ("+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()+")\n";
+			int innerLayer = stub1.getInnerLayer();
+			int outerLayer = stub1.getOuterLayer();
+			if (stub2.getInnerLayer() < innerLayer) { innerLayer = stub2.getInnerLayer(); }
+			if (stub2.getOuterLayer() > outerLayer) { outerLayer = stub2.getOuterLayer(); }
+			boolean foundHitInStub1ButNotStub2 = false;
+			boolean foundHitInStub2ButNotStub1 = false;
+			boolean foundLayerWhereStubsHaveNonOverlappingHits = false;
+			for (int layer=innerLayer; layer<=outerLayer; layer++) {
+			    Collection<CalorimeterHit> stub1HitsInLayer = stub1.getHitsInLayer(layer);
+			    Collection<CalorimeterHit> stub2HitsInLayer = stub2.getHitsInLayer(layer);
+			    // Check some stuff
+			    boolean flagLayerGoodMatch = false;
+			    boolean flagLayerBadMatch = false;
+			    if (stub1HitsInLayer != null && stub1HitsInLayer.size() > 0 && stub2HitsInLayer == null) {
+				foundHitInStub1ButNotStub2 = true;
+			    } else if (stub2HitsInLayer != null && stub2HitsInLayer.size() > 0 && stub1HitsInLayer == null) {
+				foundHitInStub2ButNotStub1 = true;
+			    } else if (stub1HitsInLayer == null && stub2HitsInLayer == null) {
+				// This can happen if both skipped that layer
+				printme += "[debug: both no hits/layer skip for layer "+layer+"]\n";
+				//throw new AssertionError("Book-keeping error");
+			    } else if (stub2HitsInLayer != null && stub2HitsInLayer.size() > 0 && stub1HitsInLayer != null && stub1HitsInLayer.size() > 0) {
+				// Both have hits in this layer
+				boolean foundOverlapInThisLayer = false;
+				for (CalorimeterHit hit : stub1HitsInLayer) {
+				    if (stub2HitsInLayer.contains(hit)) {
+					foundOverlapInThisLayer = true;
+				    }
+				}
+				if (foundOverlapInThisLayer) {
+				    flagLayerGoodMatch = true;
+				} else {
+				    foundLayerWhereStubsHaveNonOverlappingHits = true;
+				    foundHitInStub1ButNotStub2 = true;
+				    foundHitInStub2ButNotStub1 = true;
+				    flagLayerBadMatch = true;
+				}
+			    } else {
+				throw new AssertionError("Book-keeping error");
+			    } 
+			    printme += "   * L"+layer+"  ";
+			    printme += "Stub1=[";
+			    if (stub1HitsInLayer == null) {
+				printme += "empty";
+			    } else {
+				for (CalorimeterHit hit : stub1HitsInLayer) {
+				    printme += hit.getCellID() + "/";
+				}
+			    }
+			    printme += "], Stub2=[";
+			    if (stub2HitsInLayer == null) {
+				printme += "empty";
+			    } else {
+				for (CalorimeterHit hit : stub2HitsInLayer) {
+				    printme += hit.getCellID() + "/";
+				}
+			    }
+			    if (flagLayerGoodMatch) {
+				printme += "   -- good match";
+			    }
+			    if (flagLayerBadMatch) {
+				printme += "   -- doesn't match";
+			    }
+			    printme += "]\n";
+			}
+			boolean interestingFlag = false;
+			if (foundHitInStub1ButNotStub2 && !foundHitInStub2ButNotStub1) {
+			    interestingFlag  = true;
+			    printme += "   --> Looks like Stub2 is a subset of Stub1.\n";
+			} else if (!foundHitInStub1ButNotStub2 && foundHitInStub2ButNotStub1) {
+			    interestingFlag  = true;
+			    printme += "   --> Looks like Stub1 is a subset of Stub2.\n";
+			} else if (!foundLayerWhereStubsHaveNonOverlappingHits) {
+			    interestingFlag = true;
+			    printme += "   --> Looks like they could be compatible.";
+			    printme += " Flags: equals=";
+			    printme += (stub1.equals(stub2));
+			    printme += ", near-equals=";
+			    printme += (stub1.equalsAllowingForSkippedLayersAndDoubleHits(stub2));
+			    printme += ". Reverse flags: equals=";
+			    printme += (stub2.equals(stub1));
+			    printme += ", near-equals=";
+			    printme += (stub2.equalsAllowingForSkippedLayersAndDoubleHits(stub1));
+			    printme += "\n";
+			}
+			interestingFlag = true; // Force heavy debug
+			if (interestingFlag) {
+			    if (m_debug) { System.out.println(printme); }
+			}
+		    }
+		}
+	    }
+	}
+
+	// Now, which were the largest stubs in the event?
+	Map<Integer, Set<Stub>> mapLayerCountToStubs = new HashMap<Integer, Set<Stub>>();
+	for (Stub stub : uniqueMultiLayerStubsNoSubsetsAllowingForDoubleHits) {
+	    int layerCount = stub.getLayersCount();
+	    Set<Stub> stubsWithThisLayerCount = mapLayerCountToStubs.get(layerCount);
+	    if (stubsWithThisLayerCount == null) {
+		stubsWithThisLayerCount = new HashSet<Stub>();
+		mapLayerCountToStubs.put(layerCount, stubsWithThisLayerCount);
+	    }
+	    stubsWithThisLayerCount.add(stub);
+	}
+	List<Integer> layerCountList = new Vector<Integer>();
+	layerCountList.addAll(mapLayerCountToStubs.keySet());
+	Collections.sort(layerCountList, Collections.reverseOrder());
+	List<Stub> sortedStubs = new Vector<Stub>();
+	for (Integer i : layerCountList) {
+	    Set<Stub> stubs = mapLayerCountToStubs.get(i);
+	    sortedStubs.addAll(stubs);
+	}
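+	// sortedStubs is now ordered from most layers to fewest, so the merging loop below
+	// lets the largest stubs claim overlapping smaller stubs first.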
+
+	// Loop over stubs & create final output list
+	List<Stub> finalOutputStubs = new Vector<Stub>();
+	Set<Integer> vetoedStubs = new HashSet<Integer>();
+	for (int i=0; i<sortedStubs.size(); i++) {
+	    if (vetoedStubs.contains(i)) { continue; }
+	    Stub stub1 = sortedStubs.get(i);
+	    if (stub1.getLayersCount() < 4) {
+		// Too small -- skip. [Could probably just break here]
+		continue;
+	    }
+	    Collection<CalorimeterHit> allHitsInStub1 = stub1.getAllHits();
+	    if (m_debug) { System.out.println(i+": Stub with "+stub1.getLayersCount()+" layers ("+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+")"); }
+	    Set<Integer> overlappingCompatibleStubs = new HashSet<Integer>();
+	    for (int j=i+1; j<sortedStubs.size(); j++) {
+		if (vetoedStubs.contains(j)) { continue; }
+		Stub stub2 = sortedStubs.get(j);
+		if (stub1.overlaps(stub2)) {
+		    String printme = "Overlap between stub["+i+"] with "+stub1.getLayersCount()+" layers ("+stub1.getInnerLayer()+"-"+stub1.getOuterLayer()+") and stub["+j+"] with "+stub2.getLayersCount()+" layers ("+stub2.getInnerLayer()+"-"+stub2.getOuterLayer()+")";
+		    if (stub1.getInnerLayer() > stub2.getInnerLayer() || stub1.getOuterLayer() < stub2.getOuterLayer()) {
+			if (stub1.compatible(stub2, doubleHitMapHcal)) {
+			    printme += "  -- compatible!";
+			    overlappingCompatibleStubs.add(j);
+			}
+		    } else {
+			printme += "  -- enclosed";
+		    }
+		    if (m_debug) { System.out.println(printme); }
+		    vetoedStubs.add(j);
+		}
+	    }
+	    // Now, we consider merging some of the overlapping,
+	    // compatible stubs in. We basically look at two cases:
+	    //   (1) Large overlapping, compatible stubs. Here the
+	    //       requirement is at least 4 non-overlapping layers.
+	    //       These take priority (starting with the ones with the
+	    //       most non-overlapping layers).
+	    //   (2) Small overlapping, compatible stubs. Here the
+	    //       requirement is at least 2 overlapping layers. We
+	    //       also require no ambiguity -- i.e. if there are
+	    //       multiple ones that could be attached but which are
+	    //       not mutually compatible, we don't use any of them.
+	    Map<Integer,Integer> mapIndexToNumNonOverlappingLayers = new HashMap<Integer,Integer>();
+	    List<Integer> largeStubs = new Vector<Integer>();
+	    List<Integer> smallStubs = new Vector<Integer>();
+	    for (Integer j : overlappingCompatibleStubs) {
+		Stub otherStub = sortedStubs.get(j);
+		int countNonOverlappingLayers = 0;
+		int countOverlappingLayers = 0;
+		for (int layer = otherStub.getInnerLayer(); layer <= otherStub.getOuterLayer(); layer++) {
+		    if (otherStub.getHitsInLayer(layer) != null && otherStub.getHitsInLayer(layer).size() > 0) {
+			if (stub1.getHitsInLayer(layer) == null || stub1.getHitsInLayer(layer).size()==0) {
+			    countNonOverlappingLayers++;
+			} else {
+			    countOverlappingLayers++;
+			}
+		    }
+		}
+		mapIndexToNumNonOverlappingLayers.put(j, countNonOverlappingLayers);
+		if (countNonOverlappingLayers >= 4) {
+		    largeStubs.add(j);
+		} else if (countNonOverlappingLayers >= 1 && countOverlappingLayers >= 2) {
+		    smallStubs.add(j);
+		}
+	    }
+	    // Sort list of large stubs by size (lazy coding)
+	    for (int iSort = 0; iSort < largeStubs.size(); iSort++) {
+		Integer iSortIndex = largeStubs.get(iSort);
+		for (int jSort = iSort+1; jSort < largeStubs.size(); jSort++) {
+		    Integer jSortIndex = largeStubs.get(jSort);
+		    if (mapIndexToNumNonOverlappingLayers.get(jSortIndex) > mapIndexToNumNonOverlappingLayers.get(iSortIndex)) {
+			largeStubs.set(iSort, jSortIndex);
+			largeStubs.set(jSort, iSortIndex);
+		    }
+		}
+	    }
+	    // Find & accept, starting with large stubs:
+	    Set<Integer> acceptedMerges = new HashSet<Integer>();
+	    for (Integer overlappingLargeStubIndex : largeStubs) {
+		// Has this one already been excluded?
+		boolean excluded = false;
+		Stub overlappingStub = sortedStubs.get(overlappingLargeStubIndex);
+		for (Integer acceptedMergeIndex : acceptedMerges) {
+		    Stub acceptedMergeStub = sortedStubs.get(acceptedMergeIndex);
+		    for (int layer=overlappingStub.getInnerLayer(); layer <= overlappingStub.getOuterLayer(); layer++) {
+			if (acceptedMergeStub.layerEquals(overlappingStub, layer)) {
+			    // That's OK -- both are the same in that layer
+			} else if (stub1.layerEqualsAllowingForDoubleHits(overlappingStub, layer)) {
+			    // That's OK -- it's part of the main stub
+			} else {
+			    // Not OK
+			    excluded = true;
+			    break;
+			}
+		    }
+		    if (excluded) { break; }
+		}
+		if (!excluded) {
+		    // OK to use this one
+		    acceptedMerges.add(overlappingLargeStubIndex);
+		}
+	    }
+	    // Now move to the small stubs, being even more picky:
+	    for (Integer overlappingSmallStubIndex : smallStubs) {
+		boolean excluded = false;
+		Stub overlappingStub = sortedStubs.get(overlappingSmallStubIndex);
+		for (Integer overlappingLargeStubIndex : largeStubs) {
+		    Stub largeStub = sortedStubs.get(overlappingLargeStubIndex);
+		    for (int layer=overlappingStub.getInnerLayer(); layer <= overlappingStub.getOuterLayer(); layer++) {
+			if (stub1.layerEqualsAllowingForDoubleHits(overlappingStub, layer)) {
+			    // That's OK -- it's part of the main stub
+			} else if (largeStub.getHitsInLayer(layer)==null || largeStub.getHitsInLayer(layer).size()==0) {
+			    // That's OK -- no hits in that layer in the other stub
+			} else {
+			    excluded = true;
+			    break;
+			}
+		    }
+		    if (excluded) { break; }
+		}
+		for (Integer otherOverlappingSmallStubIndex : smallStubs) {
+		    if (overlappingSmallStubIndex.equals(otherOverlappingSmallStubIndex)) { continue; }
+		    Stub otherSmallStub = sortedStubs.get(otherOverlappingSmallStubIndex);
+		    for (int layer=overlappingStub.getInnerLayer(); layer <= overlappingStub.getOuterLayer(); layer++) {
+			if (stub1.layerEqualsAllowingForDoubleHits(overlappingStub, layer)) {
+			    // That's OK -- it's part of the main stub
+			} else if (otherSmallStub.getHitsInLayer(layer)==null || otherSmallStub.getHitsInLayer(layer).size()==0) {
+			    // That's OK -- no hits in that layer in the other stub
+			} else {
+			    excluded = true;
+			    break;
+			}
+		    }
+		    if (excluded) { break; }
+		}
+		if (!excluded) { acceptedMerges.add(overlappingSmallStubIndex); }
+	    }
+	    Set<Stub> stubsToMerge = new HashSet<Stub>();
+	    stubsToMerge.add(stub1);
+	    for (Integer acceptedMerge : acceptedMerges) {
+		if (m_debug) { System.out.println("  -> Merging in accepted stub ["+acceptedMerge+"]"); }
+		stubsToMerge.add(sortedStubs.get(acceptedMerge));
+	    }
+	    Stub mergedStub = mergeStubs(stubsToMerge);
+	    if (mergedStub.getLayersCount() >= 4) {
+		finalOutputStubs.add(mergedStub);
+		if (m_debug) { System.out.println("  -> Saved merged stub with "+mergedStub.getLayersCount()+" layers ("+mergedStub.getInnerLayer()+"-"+mergedStub.getOuterLayer()+")"); }
+	    }
+	}
+
+	// Validate that each hit is only used once, purging if needed.
+	Set<Long> usedHits = new HashSet<Long>();
+	Set<Long> multiplyUsedHits = new HashSet<Long>();
+	for (Stub stub : finalOutputStubs) {
+	    for (CalorimeterHit hit : stub.getAllHits()) {
+		long cellID = hit.getCellID();
+		if (usedHits.contains(cellID)) { 
+		    multiplyUsedHits.add(cellID);
+		}
+		usedHits.add(cellID);
+	    }
+	}
+	
+	// Turn stubs into clusters
+	Set<Long> finalUsedHits = new HashSet<Long>();
+	List<Cluster> finalOutputClusters = new Vector<Cluster>();
+	for (Stub stub : finalOutputStubs) {
+	    BasicCluster clus = new BasicCluster();
+	    for (CalorimeterHit hit : stub.getAllHits()) {
+		long cellID = hit.getCellID();
+		if ( ! multiplyUsedHits.contains(cellID) ) {
+		    clus.addHit(hit);
+		    finalUsedHits.add(cellID);
+		}
+	    }
+	    // Require >= 4 hits left.
+	    // Probably we should also check for stubs that got broken apart here too...
+	    if (clus.getCalorimeterHits().size()>=4) {
+		finalOutputClusters.add(clus);
+	    }
+	}
+	if (m_debug) { System.out.println("Return "+finalOutputClusters.size()+" stubs. Used "+finalUsedHits.size()+" hits."); }
+	return finalOutputClusters;
+    }
+
+    protected void subProcess(EventHeader event, HitMap inputAcceptedHits, HitMap inputHits, String outputNameMips, String outputNameHits) {
+	
+	//TrackClusterDriver findMIPsEcal = new TrackClusterDriver(inputHitMapEcal, "mipsECAL", "hitmapECAL");
+	//TrackClusterDriver findMIPsHcal = new TrackClusterDriver(inputHitMapHcal, "mipsHCAL", "hitmapHCAL");
+	//findMIPsHcal.setDebug(true); // TEMP -- FIXME
+	//findMIPsHcal.process(event);
+
+	// OK. What we want to do is to look for track stubs, then try to expand them.
+	// A stub is three hits in successive layers such that
+	//    * The [transverse?] distance from each hit to the next is below a threshold d_max
+	//    * The dot product of the displacement vectors between hits 1&2 and 2&3 is close to 1
+	//    * Most/all of the hits are isolated or near-isolated
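+	// Illustrative sketch (not part of the committed algorithm): for three hits hit1, hit2,
+	// hit3 in successive layers, the collinearity requirement above could be checked with
+	// the hep.physics.vec classes roughly as
+	//   double[] a = hit1.getPosition(), b = hit2.getPosition(), c = hit3.getPosition();
+	//   Hep3Vector d12 = VecOp.unit(new BasicHep3Vector(b[0]-a[0], b[1]-a[1], b[2]-a[2]));
+	//   Hep3Vector d23 = VecOp.unit(new BasicHep3Vector(c[0]-b[0], c[1]-b[1], c[2]-b[2]));
+	//   boolean collinear = VecOp.dot(d12, d23) > minimumDotProduct;   // e.g. 0.95, as below
+	// The actual cuts are applied inside findTwoLayerStubs and expandStubSimple.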
+
+	// Begin by finding isolated and semi-isolated hits:
+	// Each isolated hit is a potential seed.
+	Map<Integer, List<CalorimeterHit>> isolatedHitsHcal = new HashMap<Integer, List<CalorimeterHit>>();
+	Map<Integer, List<CalorimeterHit>> semiIsolatedHitsHcal = new HashMap<Integer, List<CalorimeterHit>>();
+	Map<CalorimeterHit,CalorimeterHit> doubleHitMapHcal = new HashMap<CalorimeterHit,CalorimeterHit>();
+	long timeBeforeFindIsolatedAndSemiIsolatedHits = Calendar.getInstance().getTimeInMillis();
+	findIsolatedAndSemiIsolatedHits(inputAcceptedHits, inputHits, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal);
+	{
+	    Set<CalorimeterHit> tmpIsolatedHits = new HashSet<CalorimeterHit>();
+	    Set<CalorimeterHit> tmpSemiIsolatedHits = new HashSet<CalorimeterHit>();
+	    for (List<CalorimeterHit> tmpList : isolatedHitsHcal.values()) { tmpIsolatedHits.addAll(tmpList); }
+	    for (List<CalorimeterHit> tmpList : semiIsolatedHitsHcal.values()) { tmpSemiIsolatedHits.addAll(tmpList); }
+	    System.out.println("DEBUG: Started with "+inputAcceptedHits.size()+"/"+inputHits.size()+" accepted hits.");
+	    System.out.println("DEBUG: Found "+tmpIsolatedHits.size()+" isolated hits + "+tmpSemiIsolatedHits.size()+" semi-isolated hits.");
+	    System.out.println("DEBUG: Found "+doubleHitMapHcal.size()+" double-hits");
+	}
+	long timeAfterFindIsolatedAndSemiIsolatedHits = Calendar.getInstance().getTimeInMillis();
+	System.out.println("TIME to find isolated/semi-isolated hits: "+(timeAfterFindIsolatedAndSemiIsolatedHits-timeBeforeFindIsolatedAndSemiIsolatedHits)+" ms");
+
+	// This is fiddly because there is a lot of ambiguity in the early stages.
+	// So we first try to make three-hit stubs without checking for uniqueness
+	// (i.e. hits may appear in more than one stub)
+	// Stubs with hits (k, l, m) in layers (i, i+1, i+2) are uniquely defined iff:
+	//   * There is no other stub which contains 
+	//        hit k and a hit in layer i+1 but not hit l
+	//        hit k and a hit in layer i+2 but not hit m
+	//        hit l and a hit in layer i   but not hit k
+	//        hit l and a hit in layer i+2 but not hit m
+	//        hit m and a hit in layer i   but not hit k
+	//        hit m and a hit in layer i+1 but not hit l
+	// Otherwise, the stub is not uniquely defined.
+	// If stubs are uniquely defined:
+	//   Note that two stubs can both be uniquely defined, yet overlap at the
+	//   edge (1 or 2 hits). This is OK **if** they can be connected. Otherwise,
+	//     * Overlap of 1 hit: discard both and veto that hit
+	//     * Overlap of 2 hits: discard both, but don't veto hits.
+	// If stubs are not uniquely defined:
+	//   * Case where two hits in adjacent layers are shared: Might have picked up a
+	//     wonky hit in third layer. Check by seeing if either stub can be expanded
+	//     in that direction. If one can and the other can't, discard the
+	//     non-expandable one. If both can, discard both (ambiguity/overlap)
+	//   * Case where two hits in layers (i, i+2) are shared: ugly. Shouldn't happen
+	//     if dot-product cuts are set right... but if it does happen, discard both.
+
+	// Look for nearby pairs of hits in adjacent layers.
+	// Watch out: What about the barrel/endcap corner?
+	double allowedSeparation3D = 50.0;
+	List<Stub> twoLayerStubs = findTwoLayerStubs(isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D);
+	System.out.println("DEBUG: Found "+twoLayerStubs.size()+" two-layer stubs.");
+	long timeAfterFindTwoLayerStubs = Calendar.getInstance().getTimeInMillis();
+	System.out.println("TIME to find two-layer stubs: "+(timeAfterFindTwoLayerStubs-timeAfterFindIsolatedAndSemiIsolatedHits)+" ms");
+
+	// Now try to add a third hit.
+	// Since we don't have any forwards/backwards preference at
+	// this point, we can equally well expand in either direction.
+	// Try both + and - directions, keeping in mind that we'll make
+	// some redundant stubs.
+	double minimumDotProduct = 0.95;
+	Map<Integer, List<Stub>> threeLayerStubsOrganisedByInnerLayer = new HashMap<Integer, List<Stub>>();
+	for (Stub twoLayerStub : twoLayerStubs) {
+	    int baseStubInnerLayer = twoLayerStub.getInnerLayer();
+	    // First, try to expand outward
+	    List<Stub> stubCandidatesExpandingOutward = expandStubSimple(twoLayerStub, +1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+	    if (stubCandidatesExpandingOutward.size() > 0) {
+		System.out.println("DEBUG: 2-hit stub expanded outward -> "+stubCandidatesExpandingOutward.size()+" 3-hit stubs.");
+		int innerLayerAfterExpandingOutward = baseStubInnerLayer;
+		List<Stub> existingThreeLayerStubs = threeLayerStubsOrganisedByInnerLayer.get(innerLayerAfterExpandingOutward);
+		if (existingThreeLayerStubs == null) {
+		    existingThreeLayerStubs = new Vector<Stub>();
+		}
+		for (Stub newStub : stubCandidatesExpandingOutward) {
+		    //System.out.println("DEBUG: Expanding outward, testing stub with hits in layers "+newStub.getInnerLayer()+"-"+newStub.getOuterLayer());
+		    if (newStub.getInnerLayer() != innerLayerAfterExpandingOutward) { throw new AssertionError("Book-keeping error"); }
+		    // Now, we need to check whether the new stub is a match for existing one(s)
+		    boolean foundIdenticalStub = false;
+		    for (Stub existingStub : existingThreeLayerStubs) {
+			if (newStub.equals(existingStub)) {
+			    foundIdenticalStub = true;
+			    break;
+			}
+		    }
+		    if (foundIdenticalStub) {
+			// Already an exact duplicate -- no new information here
+			//System.out.println("DEBUG:  * foundIdenticalStub");
+		    } else {
+			// No exact match already -- what about near-matches?
+			List<Stub> nearMatches = new Vector<Stub>();
+			for (Stub existingStub : existingThreeLayerStubs) {
+			    if (newStub.equalsAllowingForDoubleHits(existingStub)) {
+				nearMatches.add(existingStub);
+			    }
+			}
+			if (nearMatches.size()>0) {
+			    //System.out.println("DEBUG:  * found "+nearMatches.size()+" near-matches");
+			    int countRemovedStubs = 0;
+			    for (Stub nearMatch : nearMatches) {
+				boolean removedOK = existingThreeLayerStubs.remove(nearMatch);
+				if (removedOK) { countRemovedStubs++; }
+			    }
+			    //System.out.println("DEBUG:  * removed "+countRemovedStubs+" near-matches");
+			    nearMatches.add(newStub);
+			    Stub mergedStub = mergeStubs(nearMatches);
+			    existingThreeLayerStubs.add(mergedStub);
+			} else {
+			    // No matches at all
+			    existingThreeLayerStubs.add(newStub);
+			}
+		    }
+		}
+		threeLayerStubsOrganisedByInnerLayer.put(innerLayerAfterExpandingOutward, existingThreeLayerStubs);
+	    }
+	    // Then, try to expand inward
+	    List<Stub> stubCandidatesExpandingInward = expandStubSimple(twoLayerStub, -1, isolatedHitsHcal, semiIsolatedHitsHcal, doubleHitMapHcal, allowedSeparation3D, minimumDotProduct);
+	    if (stubCandidatesExpandingInward.size() > 0) {
+		System.out.println("DEBUG: 2-hit stub expanded inward -> "+stubCandidatesExpandingInward.size()+" 3-hit stubs.");
+		int innerLayerAfterExpandingInward = baseStubInnerLayer - 1;
+		List<Stub> existingThreeLayerStubs = threeLayerStubsOrganisedByInnerLayer.get(innerLayerAfterExpandingInward);
+		if (existingThreeLayerStubs == null) {
+		    existingThreeLayerStubs = new Vector<Stub>();
+		}
+		for (Stub newStub : stubCandidatesExpandingInward) {
+		    //System.out.println("DEBUG: Expanding inward, testing stub with hits in layers "+newStub.getInnerLayer()+"-"+newStub.getOuterLayer());
+		    if (newStub.getInnerLayer() != innerLayerAfterExpandingInward) { throw new AssertionError("Book-keeping error"); }
+		    // Now, we need to check whether the new stub is a match for existing one(s)
+		    boolean foundIdenticalStub = false;
+		    for (Stub existingStub : existingThreeLayerStubs) {
+			if (newStub.equals(existingStub)) {
+			    foundIdenticalStub = true;
+			    break;
+			}
+		    }
+		    if (foundIdenticalStub) {
+			// Already an exact duplicate -- no new information here
+			//System.out.println("DEBUG:  * foundIdenticalStub");
+		    } else {
+			// No exact match already -- what about near-matches?
+			List<Stub> nearMatches = new Vector<Stub>();
+			for (Stub existingStub : existingThreeLayerStubs) {
+			    if (newStub.equalsAllowingForDoubleHits(existingStub)) {
+				nearMatches.add(existingStub);
+			    }
+			}
+			if (nearMatches.size()>0) {
+			    //System.out.println("DEBUG:  * found "+nearMatches.size()+" near-matches");
+			    int countRemovedStubs = 0;
+			    for (Stub nearMatch : nearMatches) {
+				boolean removedOK = existingThreeLayerStubs.remove(nearMatch);
+				if (removedOK) { countRemovedStubs++; }
+			    }
+			    //System.out.println("DEBUG:  * removed "+countRemovedStubs+" near-matches");
+			    nearMatches.add(newStub);
+			    Stub mergedStub = mergeStubs(nearMatches);
+			    existingThreeLayerStubs.add(mergedStub);
+			} else {
+			    // No matches at all
+			    existingThreeLayerStubs.add(newStub);
+			}
+		    }
+		}
+		threeLayerStubsOrganisedByInnerLayer.put(innerLayerAfterExpandingInward, existingThreeLayerStubs);
+	    }
+	}
+	List<Stub> threeLayerStubs = new Vector<Stub>();
+	for (List<Stub> stubList : threeLayerStubsOrganisedByInnerLayer.values()) {
+	    threeLayerStubs.addAll(stubList);
+	}
+	System.out.println("DEBUG: Started with "+threeLayerStubs.size()+" 3-layer stubs");
+	long timeAfterFindThreeLayerStubs = Calendar.getInstance().getTimeInMillis();
+	System.out.println("TIME to find three-layer stubs: "+(timeAfterFindThreeLayerStubs-timeAfterFindTwoLayerStubs)+" ms");
+
+	// If any of these are redundant, merge them:
+
+	//System.out.println("DEBUG: After merging, left with "+mergedStubs.size()+" 3-layer stubs");
+	//long timeAfterMergeThreeLayerStubs = Calendar.getInstance().getTimeInMillis();
+	//System.out.println("TIME to merge three-layer stubs: "+(timeAfterMergeThreeLayerStubs-timeAfterFindThreeLayerStubs)+" ms");
+	List<Stub> mergedStubs = threeLayerStubs;
+
+	List<List<Stub>> matches_APlus = findAPlusMatches(mergedStubs, minimumDotProduct);
+	long timeAfterFindAPlusMatches = Calendar.getInstance().getTimeInMillis();
+	System.out.println("TIME to find A+ matches: "+(timeAfterFindAPlusMatches-timeAfterFindThreeLayerStubs)+" ms");
+
+	Map<Stub,Stub> matchesOutward = new HashMap<Stub,Stub>();
+	Map<Stub,Stub> matchesInward = new HashMap<Stub,Stub>();
+	for (List<Stub> match : matches_APlus) {
+	    if (match.size() != 2) { throw new AssertionError("Book-keeping error"); }
+	    Stub stub1 = match.get(0);
+	    Stub stub2 = match.get(1);
+	    Stub innerStub = null;
+	    Stub outerStub = null;
+	    if (stub1.getInnerLayer() < stub2.getInnerLayer()) {
+		innerStub = stub1;
+		outerStub = stub2;
+	    } else {
+		innerStub = stub2;
+		outerStub = stub1;
+	    }		
+	    if (matchesOutward.get(innerStub) != null) { throw new AssertionError("Book-keeping error"); }
+	    if (matchesInward.get(outerStub) != null) { throw new AssertionError("Book-keeping error"); }
+	    matchesOutward.put(innerStub,outerStub);
[truncated at 1000 lines; 1979 more skipped]
CVSspam 0.2.8