Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
MinimalRunPFA.java              +22   -1     1.5  -> 1.6
ReclusterDTreeDriver.java       +101  -295   1.56 -> 1.57
ReclusterDriver.java            +7    -1     1.42 -> 1.43
RemoveHitsFromClusters.java           -20    1.1  -> 1.2
SetUpDTreeForReclustering.java  +124  -22    1.10 -> 1.11
Total                           +254  -339
5 modified files
MJC: (contrib) Checkpoint while refactoring PFA

lcsim/src/org/lcsim/contrib/uiowa
MinimalRunPFA.java 1.5 -> 1.6
diff -u -r1.5 -r1.6
--- MinimalRunPFA.java	13 Oct 2008 06:34:03 -0000	1.5
+++ MinimalRunPFA.java	15 Oct 2008 22:10:25 -0000	1.6
@@ -45,8 +45,29 @@
 	add(new org.lcsim.contrib.uiowa.SetUpDTreeForReclustering(allHitLists, recoHitLists, mstHitLists, findCluster));
 
 	// Set up and run PFA
-	ReclusterDTreeDriver reclusTree = new ReclusterDTreeDriver("DTreeClusters", "FSReconTracksWithoutMuons", "ReconFSParticles", "MuonTrackClusterMap", findCluster);
+	ReclusterDTreeDriver reclusTree = new ReclusterDTreeDriver("DTreeClusters", "UnmatchedTracksAfterAmbigClusterMap", "ReconFSParticles", "MuonTrackClusterMap", findCluster);
 	reclusTree.writeExtraEventOutput(writeExtraOutput);
+	reclusTree.addInputMips("OldMipsInsideTreesECAL");
+	reclusTree.addInputMips("NewMipsInsideTreesECAL");
+	reclusTree.addInputMips("OldMipsInsideTreesHCAL");
+	reclusTree.addInputMips("NewMipsInsideTreesHCAL");
+	reclusTree.addInputMips("OldMipsInsideTreesMCAL");
+	reclusTree.addInputMips("NewMipsInsideTreesMCAL");
+	reclusTree.addInputClumps("ClumpsInsideTreesECAL");
+	reclusTree.addInputClumps("ClumpsInsideTreesHCAL");
+	reclusTree.addInputClumps("ClumpsInsideTreesMCAL");
+	reclusTree.addInputBlocks("BlocksInsideTreesECAL");
+	reclusTree.addInputBlocks("BlocksInsideTreesHCAL");
+	reclusTree.addInputBlocks("BlocksInsideTreesMCAL");
+	reclusTree.addInputLeftoverHits("LeftoverHitsInsideTreesECAL");
+	reclusTree.addInputLeftoverHits("LeftoverHitsInsideTreesHCAL");
+	reclusTree.addInputLeftoverHits("LeftoverHitsInsideTreesMCAL");
+	reclusTree.addTrackToClusterMap("MapElectronTracksToClusters");
+	reclusTree.addTrackToClusterMap("MuonTrackClusterMap");
+	reclusTree.addTrackToClusterMap("MapPreShowerMipTracksToClusterSeeds");
+	reclusTree.addTrackToClusterMap("MapMipClusterTracksToClusterSeeds");
+	reclusTree.addTrackToClusterMap("MapGenClusterTracksToClusterSeeds");
+	reclusTree.addTrackToClusterMap("MapAmbigClusterTracksToClusterSeeds");
 	add(reclusTree);
 
 	// Write out, make plots

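For reference, the block of addInputMips/addInputClumps/addInputBlocks/addInputLeftoverHits/addTrackToClusterMap calls above follows an accumulate-then-iterate configuration idiom: collection names are registered on the driver at steering time, and the driver resolves them from the event inside process(). A minimal standalone sketch of that idiom with a hypothetical driver name (NamedClusterUnionDriver is illustrative and not part of this commit; only Driver, EventHeader, Cluster and the event.get(Cluster.class, name) lookup are taken from the code above):

    import java.util.List;
    import java.util.Vector;
    import org.lcsim.event.Cluster;
    import org.lcsim.event.EventHeader;
    import org.lcsim.util.Driver;

    // Hypothetical sketch: collection names are accumulated at steering time
    // and the union of the named lists is rebuilt for each event.
    public class NamedClusterUnionDriver extends Driver {
        private final List<String> m_inputNames = new Vector<String>();

        public void addInputList(String name) { m_inputNames.add(name); }

        public void process(EventHeader event) {
            List<Cluster> merged = new Vector<Cluster>();
            for (String name : m_inputNames) {
                // Same lookup the reclustering driver uses for its named inputs
                merged.addAll(event.get(Cluster.class, name));
            }
            // ... hand "merged" to the downstream algorithm ...
        }
    }

The design choice is that the steering driver stays the single place where collection names are spelled out, instead of hard-coding them inside the PFA driver.
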
lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.56 -> 1.57
diff -u -r1.56 -r1.57
--- ReclusterDTreeDriver.java	13 Oct 2008 06:34:31 -0000	1.56
+++ ReclusterDTreeDriver.java	15 Oct 2008 22:10:25 -0000	1.57
@@ -35,7 +35,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.56 2008/10/13 06:34:31 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.57 2008/10/15 22:10:25 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -59,7 +59,7 @@
     protected boolean m_oldMipFinderCrossesTrees = true;
     protected boolean m_useNewMipFinder = true;
     protected boolean m_useOldMipFinder = true;
-    protected boolean m_removePoorQualityMips = false;
+    protected boolean m_removePoorQualityMips = false; // DANGER: STILL USED
     protected boolean m_clusterAsJets = true;
     protected boolean m_ignorePunchThroughTracksForJets = true;
     protected boolean m_useTracksThatDontReachCalorimeter = true;
@@ -74,14 +74,14 @@
 
     protected boolean m_allowNeutralCalibForEoverP = false;
 
-    protected int m_minHitsToBeTreatedAsClusterECAL = 15;
-    protected int m_minHitsToBeTreatedAsClusterHCAL = 20;
-    protected int m_minHitsToBeTreatedAsClusterMCAL = 5;
-    protected int m_minHitsToBeTreatedAsClusterFCAL = m_minHitsToBeTreatedAsClusterECAL;
-    protected double m_newMipFinderRadiusECAL = 20.0;
-    protected double m_newMipFinderRadiusHCAL = 50.0;
-    protected double m_newMipFinderRadiusMCAL = 100.0;
-    protected double m_newMipFinderRadiusFCAL = m_newMipFinderRadiusECAL;
+    //protected int m_minHitsToBeTreatedAsClusterECAL = 15;
+    //protected int m_minHitsToBeTreatedAsClusterHCAL = 20;
+    //protected int m_minHitsToBeTreatedAsClusterMCAL = 5;
+    //protected int m_minHitsToBeTreatedAsClusterFCAL = m_minHitsToBeTreatedAsClusterECAL;
+    protected double m_newMipFinderRadiusECAL = 20.0; // DANGER: STILL USED
+    protected double m_newMipFinderRadiusHCAL = 50.0; // DANGER: STILL USED
+    //protected double m_newMipFinderRadiusMCAL = 100.0;
+    //protected double m_newMipFinderRadiusFCAL = m_newMipFinderRadiusECAL;
     protected boolean m_allowSharingOfIsolatedHits = true;
 
     protected double m_jetScoreThreshold = 0.7; // don't hard-code
@@ -98,6 +98,17 @@
     protected boolean m_useMucalEndcap = true;
     protected boolean m_useFcal = false;
 
+    protected List<String> m_inputMips = new Vector<String>();
+    protected List<String> m_inputClumps = new Vector<String>();
+    protected List<String> m_inputBlocks = new Vector<String>();
+    protected List<String> m_inputLeftoverHits = new Vector<String>();
+    protected List<String> m_inputTrackClusterMaps = new Vector<String>();
+    public void addInputMips(String name) { m_inputMips.add(name); }
+    public void addInputClumps(String name) { m_inputClumps.add(name); }
+    public void addInputBlocks(String name) { m_inputBlocks.add(name); }
+    public void addInputLeftoverHits(String name) { m_inputLeftoverHits.add(name); }
+    public void addTrackToClusterMap(String name) { m_inputTrackClusterMaps.add(name); }
+
     public void writeExtraEventOutput(boolean writeExtra) {
 	m_writeExtraEventOutput = writeExtra;
     }
@@ -122,78 +133,6 @@
 	m_eval = new LikelihoodEvaluatorWrapper();
 	m_outputParticleListName = "DTreeReclusteredParticles";
 	m_muonTrackClusterMapName = muonTrackClusterMap;
-
-	// Look for hits near boundaries:
-	HitNearBarrelEndcapBoundaryDecision dec = new HitNearBarrelEndcapBoundaryDecision(6.0, 15.0, 1);
-	add(dec);
-	add(new ListFilterDriver(dec, "EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
-	add(new ListFilterDriver(dec, "HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
-	add(new ListFilterDriver(dec, "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
-	add(new ListFilterDriver(dec, "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
-	add(new TransientFlagDriver("EcalBarrDigiHitsNearBoundary"));
-	add(new TransientFlagDriver("HcalBarrDigiHitsNearBoundary"));
-	add(new TransientFlagDriver("EcalEndcapDigiHitsNearBoundary"));
-	add(new TransientFlagDriver("HcalEndcapDigiHitsNearBoundary"));
-	
-	// Set up driver to look for structure inside clusters:
-	FindSubClusters clusDriverECAL = new FindSubClusters("DTreeClustersECAL", m_newMipFinderRadiusECAL, m_minHitsToBeTreatedAsClusterECAL, m_removePoorQualityMips, "OldMipsInsideTreesECAL", "NewMipsInsideTreesECAL", "ClumpsInsideTreesECAL", "BlocksInsideTreesECAL", "LeftoverHitsInsideTreesECAL", "MapTreeToTargetsECAL", "MapSharedToTreeECAL");
-	FindSubClusters clusDriverHCAL = new FindSubClusters("DTreeClustersHCAL", m_newMipFinderRadiusHCAL, m_minHitsToBeTreatedAsClusterHCAL, m_removePoorQualityMips, "OldMipsInsideTreesHCAL", "NewMipsInsideTreesHCAL", "ClumpsInsideTreesHCAL", "BlocksInsideTreesHCAL", "LeftoverHitsInsideTreesHCAL", "MapTreeToTargetsHCAL", "MapSharedToTreeHCAL");
-	FindSubClusters clusDriverMCAL = new FindSubClusters("DTreeClustersMCAL", m_newMipFinderRadiusMCAL, m_minHitsToBeTreatedAsClusterMCAL, m_removePoorQualityMips, "OldMipsInsideTreesMCAL", "NewMipsInsideTreesMCAL", "ClumpsInsideTreesMCAL", "BlocksInsideTreesMCAL", "LeftoverHitsInsideTreesMCAL", "MapTreeToTargetsMCAL", "MapSharedToTreeMCAL");
-	FindSubClusters clusDriverFCAL = new FindSubClusters("DTreeClustersFCAL", m_newMipFinderRadiusFCAL, m_minHitsToBeTreatedAsClusterFCAL, m_removePoorQualityMips, "OldMipsInsideTreesFCAL", "NewMipsInsideTreesFCAL", "ClumpsInsideTreesFCAL", "BlocksInsideTreesFCAL", "LeftoverHitsInsideTreesFCAL", "MapTreeToTargetsFCAL", "MapSharedToTreeFCAL");
-	/*
-	 * FindSubClusters clusDriverMCALforMuonID = new FindSubClusters("DTreeClustersMCALforMuonID", m_newMipFinderRadiusMCAL, m_minHitsToBeTreatedAsClusterMCAL, m_removePoorQualityMips, "OldMipsInsideTreesMCALforMuonID", "NewMipsInsideTreesMCALforMuonID", "ClumpsInsideTreesMCALforMuonID", "BlocksInsideTreesMCALforMuonID", "LeftoverHitsInsideTreesMCALforMuonID", "MapTreeToTargetsMCALforMuonID", "MapSharedToTreeMCALforMuonID");
-	 */
-	if (m_oldMipFinderCrossesTrees) {
-	    clusDriverECAL.enableBarrelEndcapCrossing("EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary");
-	    clusDriverHCAL.enableBarrelEndcapCrossing("HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary");
-	}
-	if (m_findExtraNNClusters) {
-	    clusDriverECAL.setNNrange(1,1,1);
-	    clusDriverHCAL.setNNrange(2,2,1);
-	    clusDriverMCAL.setNNrange(2,2,1);
-            /*
-	     * clusDriverMCALforMuonID.setNNrange(2,2,1);
-	     */
-	    clusDriverFCAL.setNNrange(1,1,1);
-	}
-	add(clusDriverECAL);
-	add(clusDriverHCAL);
-	// Avoid too much output:
-	add(new TransientFlagDriver("OldMipsInsideTreesECAL"));
-	add(new TransientFlagDriver("NewMipsInsideTreesECAL"));
-	add(new TransientFlagDriver("ClumpsInsideTreesECAL"));
-	add(new TransientFlagDriver("BlocksInsideTreesECAL"));
-	add(new TransientFlagDriver("LeftoverHitsInsideTreesECAL"));
-	add(new TransientFlagDriver("OldMipsInsideTreesHCAL"));
-	add(new TransientFlagDriver("NewMipsInsideTreesHCAL"));
-	add(new TransientFlagDriver("ClumpsInsideTreesHCAL"));
-	add(new TransientFlagDriver("BlocksInsideTreesHCAL"));
-	add(new TransientFlagDriver("LeftoverHitsInsideTreesHCAL"));
-	// Write out mucal, fcal if needed:
-	if (m_useMucalBarrel || m_useMucalEndcap) {
-	    add(clusDriverMCAL);
-	    add(new TransientFlagDriver("OldMipsInsideTreesMCAL"));
-	    add(new TransientFlagDriver("NewMipsInsideTreesMCAL"));
-	    add(new TransientFlagDriver("ClumpsInsideTreesMCAL"));
-	    add(new TransientFlagDriver("BlocksInsideTreesMCAL"));
-	    add(new TransientFlagDriver("LeftoverHitsInsideTreesMCAL"));
-	    /* 
-	     * add(clusDriverMCALforMuonID);
-	     * add(new TransientFlagDriver("OldMipsInsideTreesMCALforMuonID"));
-	     * add(new TransientFlagDriver("NewMipsInsideTreesMCALforMuonID"));
-	     * add(new TransientFlagDriver("ClumpsInsideTreesMCALforMuonID"));
-	     * add(new TransientFlagDriver("BlocksInsideTreesMCALforMuonID"));
-	     * add(new TransientFlagDriver("LeftoverHitsInsideTreesMCALforMuonID"));
-	     */
-	}
-	if (m_useFcal) {
-	    add(clusDriverFCAL);
-	    add(new TransientFlagDriver("OldMipsInsideTreesFCAL"));
-	    add(new TransientFlagDriver("NewMipsInsideTreesFCAL"));
-	    add(new TransientFlagDriver("ClumpsInsideTreesFCAL"));
-	    add(new TransientFlagDriver("BlocksInsideTreesFCAL"));
-	    add(new TransientFlagDriver("LeftoverHitsInsideTreesFCAL"));
-	}
     }
 
     public void process(EventHeader event) {
@@ -211,12 +150,6 @@
 	List<Cluster> dTreeClusters = event.get(Cluster.class, m_dTreeClusterListName);
 	List<Track> trackList = event.get(Track.class, m_inputTrackList);
 	System.out.println("DEBUG: "+this.getClass().getName()+" read in list of "+trackList.size()+" tracks named "+m_inputTrackList);
-        //TEST-MUON//Map<Track,Set<Cluster>> newMapTrackToMip = ((Map<Track, Set<Cluster>>)(event.get("newMapTrackToMip")));
-        //TEST-MUON//List<Track> muonTracks = new Vector<Track>();
-        //TEST-MUON//for(Track tr : newMapTrackToMip.keySet()) { muonTracks.add(tr);}
-        //TEST-MUON//boolean sub_muon = trackList.removeAll(muonTracks);
-        //TEST-MUON//if(sub_muon) { System.out.println("track list changed due to the muon list");}
-        //TEST/MUON//else { System.out.println("There is no muon track for this event"); }
 	List<Cluster> photons = event.get(Cluster.class, "PhotonClustersForDTree");
 	List<Cluster> largeClusters = dTreeClusters; // FIXME: NOT IDEAL! Perhaps run MST on DTree clusters?
 	if (trackList == null) { throw new AssertionError("Null track list!"); }
@@ -268,30 +201,20 @@
 	List<Cluster> clumps = new Vector<Cluster>();
 	List<Cluster> leftoverHitClusters = new Vector<Cluster>();
 	List<Cluster> treesWithNoStructure = new Vector<Cluster>();
-	mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesECAL"));
-	mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesHCAL"));
-	mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesECAL"));
-	mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesHCAL"));
-	clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesECAL"));
-	clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesHCAL"));
-	leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesECAL"));
-	leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesHCAL"));
-	treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesECAL"));
-	treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesHCAL"));
-	if (m_useMucalBarrel || m_useMucalEndcap) {
-	    mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesMCAL"));
-	    mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesMCAL"));
-	    clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesMCAL"));
-	    leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesMCAL"));
-	    treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesMCAL"));
+
+	for (String name : m_inputMips) {
+	    mipsOld.addAll(event.get(Cluster.class, name)); // FIXME: OLD VS NEW MIPS?
 	}
-	if (m_useFcal) {
-	    mipsOld.addAll(event.get(Cluster.class, "OldMipsInsideTreesFCAL"));
-	    mipsNew.addAll(event.get(Cluster.class, "NewMipsInsideTreesFCAL"));
-	    clumps.addAll(event.get(Cluster.class, "ClumpsInsideTreesFCAL"));
-	    leftoverHitClusters.addAll(event.get(Cluster.class, "LeftoverHitsInsideTreesFCAL"));
-	    treesWithNoStructure.addAll(event.get(Cluster.class, "BlocksInsideTreesFCAL"));
+	for (String name : m_inputClumps) {
+	    clumps.addAll(event.get(Cluster.class, name));
+	}
+	for (String name : m_inputBlocks) {
+	    treesWithNoStructure.addAll(event.get(Cluster.class, name));
 	}
+	for (String name : m_inputLeftoverHits) {
+	    leftoverHitClusters.addAll(event.get(Cluster.class, name));
+	}
+
 	List<Cluster> mips = new Vector<Cluster>();
 	mips.addAll(mipsOld);
 	mips.addAll(mipsNew);
@@ -312,195 +235,76 @@
 	}
 
 
-	/*
-	 * //For muon ID
-	 * List<Cluster> mipsMuon = new Vector<Cluster>();
-	 * mipsMuon.addAll(event.get(Cluster.class, "OldMipsInsideTreesMCALforMuonID"));
-	 * mipsMuon.addAll(event.get(Cluster.class, "NewMipsInsideTreesMCALforMuonID"));
-	 * mipsMuon.addAll(event.get(Cluster.class, "ClumpsInsideTreesMCALforMuonID"));
-	 * * if (m_debug) {
-	 * System.out.println("Found "+mips.size()+" mips, "+clumps.size()+" clumps, "+photons.size()+" photons, "+leftoverHitClusters.size()+" leftover-hit-clusters, "+treesWithNoStructure.size()+" large DTrees with no structure, and "+trackList.size()+" tracks in event.");
-	 * }
-	 */
-
-	// Also load book-keeping maps that record what subclusters or hits
+	// Also make book-keeping maps that record what subclusters or hits
 	// belong to what tree:
-	Map<Cluster, List<Cluster>> targetsInTreeECAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsECAL")));
-	Map<Cluster, Cluster> treeOfSharedClusterECAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeECAL")));
-	Map<Cluster, List<Cluster>> targetsInTreeHCAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsHCAL")));
-	Map<Cluster, Cluster> treeOfSharedClusterHCAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeHCAL")));
-	Map<Cluster, List<Cluster>> targetsInTreeMCAL = null;
-	Map<Cluster, Cluster> treeOfSharedClusterMCAL = null;
-	Map<Cluster, List<Cluster>> targetsInTreeFCAL = null;
-	Map<Cluster, Cluster> treeOfSharedClusterFCAL = null;
-	if (m_useMucalBarrel || m_useMucalEndcap) {
-	    targetsInTreeMCAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsMCAL")));
-	    treeOfSharedClusterMCAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeMCAL")));
-	}
-	if (m_useFcal) {
-	    targetsInTreeFCAL = ((Map<Cluster, List<Cluster>>)(event.get("MapTreeToTargetsFCAL")));
-	    treeOfSharedClusterFCAL = ((Map<Cluster, Cluster>)(event.get("MapSharedToTreeFCAL")));
-	}
-
 	Map<Cluster, List<Cluster>> targetsInTree = new HashMap<Cluster, List<Cluster>>();
 	Map<Cluster, Cluster> treeOfSharedCluster = new HashMap<Cluster,Cluster>();
-	targetsInTree.putAll(targetsInTreeECAL);
-	targetsInTree.putAll(targetsInTreeHCAL);
-	treeOfSharedCluster.putAll(treeOfSharedClusterECAL);
-	treeOfSharedCluster.putAll(treeOfSharedClusterHCAL);
-	if (m_useMucalBarrel || m_useMucalEndcap) {
-	    targetsInTree.putAll(targetsInTreeMCAL);
-	    treeOfSharedCluster.putAll(treeOfSharedClusterMCAL);
-	}
-	if (m_useFcal) {
-	    targetsInTree.putAll(targetsInTreeFCAL);
-	    treeOfSharedCluster.putAll(treeOfSharedClusterFCAL);
+	{
+	    Set<Cluster> targets = new HashSet<Cluster>();
+	    targets.addAll(mips);
+	    targets.addAll(clumps);
+	    targets.addAll(treesWithNoStructure);
+	    Map<Long,Cluster> mapCellToTarget = new HashMap<Long,Cluster>();
+	    Map<Long,Cluster> mapCellToLeftover = new HashMap<Long,Cluster>();
+	    for (Cluster target : targets) {
+		for (CalorimeterHit hit : target.getCalorimeterHits()) {
+		    long id = hit.getCellID();
+		    Cluster test = mapCellToTarget.get(id);
+		    if (test != null) { throw new AssertionError("Book-keeping error"); }
+		    mapCellToTarget.put(id, target);
+		}
+	    }
+	    for (Cluster clus : leftoverHitClusters) {
+		for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+		    long id = hit.getCellID();
+		    Cluster test1 = mapCellToTarget.get(id);
+		    Cluster test2 = mapCellToLeftover.get(id);
+		    if (test1 != null) { throw new AssertionError("Book-keeping error"); }
+		    if (test2 != null) { throw new AssertionError("Book-keeping error"); }
+		    mapCellToLeftover.put(id, clus);
+		}
+	    }
+	    for (Cluster tree : dTreeClusters) {
+		Set<Cluster> matchedTargets = new HashSet<Cluster>();
+		for (CalorimeterHit hit : tree.getCalorimeterHits()) {
+		    long id = hit.getCellID();
+		    Cluster target = mapCellToTarget.get(id);
+		    if (target != null) {
+			matchedTargets.add(target);
+		    }
+		    Cluster shared = mapCellToLeftover.get(id);
+		    if (shared != null) {
+			Cluster test = treeOfSharedCluster.get(shared);
+			if (test != null && test != tree) { throw new AssertionError("Book-keeping error: Shared/leftover cluster with "+shared.getCalorimeterHits().size()+" hits was found inside a tree of "+tree.getCalorimeterHits().size()+" hits but had already been seen inside a tree of "+test.getCalorimeterHits().size()+" hits."); }
+			treeOfSharedCluster.put(shared, tree);
+		    }
+		}
+		List<Cluster> matchedTargetsList = new Vector<Cluster>();
+		matchedTargetsList.addAll(matchedTargets);
+		targetsInTree.put(tree, matchedTargetsList);
+	    }
 	}
 
-	// Legacy maps, no longer used.
-	Map<Cluster, Cluster> treeOfMip = new HashMap<Cluster, Cluster>();
-	Map<Cluster, Cluster> treeOfClump = new HashMap<Cluster,Cluster>();
-	Map<Cluster, Cluster> treeOfLeftoverHits = new HashMap<Cluster,Cluster>();
-
-	// Identify the start point of showers
-	m_findCluster.process(m_event); // picks up geometry
-
 	// Match tracks
 	// ------------
 	Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
 	Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
 
-	List<Track> preLinkedTracks = new Vector<Track>();
-	boolean m_useMipInfo = true;
-	if (m_useMipInfo) {
-	    // Read in MIP connections
-	    Map<Track,BasicCluster> MapTrkToMIP = (Map<Track,BasicCluster>)(event.get("ShowerFinderMapTrackToMip"));
-
-	    // Now, each track is connected to a MIP. But some of these MIP clusters
-	    // may overlap. We need to identify the cases when that happens and then
-	    //   * produce a merged cluster
-	    //   * have each of the tracks pointing to the same merged cluster
-	    // First, check for overlaps...
-	    Map<Cluster,Track> mapMipToTrack = new HashMap<Cluster,Track>();
-	    Map<Cluster,Cluster> mapMipToMergedCluster = new HashMap<Cluster,Cluster>();
-	    Map<Cluster,List<Track>> mapMergedClusterToTracks = new HashMap<Cluster,List<Track>>();
-
-	    // Find hits for each MIP & which clusters they're inside
-	    Map<CalorimeterHit,Set<Cluster>> hitMipMap = new HashMap<CalorimeterHit,Set<Cluster>>();
-	    for (Track tr : MapTrkToMIP.keySet()) {
-		BasicCluster mip = MapTrkToMIP.get(tr);
-		mapMipToTrack.put(mip,tr);
-		for (CalorimeterHit hit : mip.getCalorimeterHits()) {
-		    Set<Cluster> mipsOfHit = hitMipMap.get(hit);
-		    if (mipsOfHit == null) {
-			mipsOfHit= new HashSet<Cluster>();
-			hitMipMap.put(hit, mipsOfHit);
-		    }
-		    mipsOfHit.add(mip);
-		}
-	    }
-	    // Requirements for this List:
-	    //  * Each cluster appears in exactly one Set
-	    List<Set<Cluster>> mipOverlapSets = new Vector<Set<Cluster>>();
-	    // Start filling the List:
-	    for (CalorimeterHit hit : hitMipMap.keySet()) {
-		Set<Cluster> touchedClusters = hitMipMap.get(hit);
-		Set<Set<Cluster>> oldLinkedClusterSets = new HashSet<Set<Cluster>>();
-		for (Cluster clus : touchedClusters) {
-		    for (Set<Cluster> currentSet : mipOverlapSets) {
-			if (currentSet.contains(clus)) {
-			    oldLinkedClusterSets.add(currentSet);
-			}
-		    }
-		}
-		Set<Cluster> newLinkedClusterSet = new HashSet<Cluster>();
-		newLinkedClusterSet.addAll(touchedClusters);
-		for (Set<Cluster> oldSet : oldLinkedClusterSets) {
-		    newLinkedClusterSet.addAll(oldSet);
-		    mipOverlapSets.remove(oldSet);
-		}
-		mipOverlapSets.add(newLinkedClusterSet);
-	    }
-	    // Verify that requirement above is true, i.e.
-	    //  * Each cluster appears in exactly one Set
-	    List<Cluster> countedClusterList = new Vector<Cluster>();
-	    Set<Cluster> countedClusterSet = new HashSet<Cluster>();
-	    for (Set<Cluster> currentSet : mipOverlapSets) {
-		countedClusterList.addAll(currentSet);
-		countedClusterSet.addAll(currentSet);
-	    }
-	    if (countedClusterList.size() != MapTrkToMIP.size()) { throw new AssertionError("Book-keeping error"); }
-	    if (countedClusterSet.size() != MapTrkToMIP.size()) { throw new AssertionError("Book-keeping error"); }
-	    // Do merge
-	    for (Set<Cluster> currentSet : mipOverlapSets) {
-		if (currentSet.size()==0) {
-		    throw new AssertionError("Empty set!");
-		} else if (currentSet.size()==1) {
-		    Cluster mip = currentSet.iterator().next();
-		    mapMipToMergedCluster.put(mip,mip);
-		    Track tr = mapMipToTrack.get(mip);
-		    List<Track> mergedTracks = new Vector<Track>();
-		    mergedTracks.add(tr);
-		    mapMergedClusterToTracks.put(mip, mergedTracks);
-		} else {
-		    BasicCluster mergedMip = new BasicCluster();
-		    List<Track> mergedTracks = new Vector<Track>();
-		    Set<CalorimeterHit> mergedHits = new HashSet<CalorimeterHit>();
-		    for (Cluster mip : currentSet) {
-			mergedHits.addAll(mip.getCalorimeterHits());
-			Track tr = mapMipToTrack.get(mip);
-			mergedTracks.add(tr);
-		    }
-		    for (CalorimeterHit hit : mergedHits) {
-			mergedMip.addHit(hit);
-		    }
-		    for (Cluster clus : currentSet) {
-			mapMipToMergedCluster.put(clus, mergedMip);
-			mapMergedClusterToTracks.put(mergedMip, mergedTracks);
-		    }
-		}
-	    }
-
-	    // Assign MIPs to tracks, taking overlaps into account
-	    for (Cluster mergedMip : mapMergedClusterToTracks.keySet()) {
-		List<Track> tracks = mapMergedClusterToTracks.get(mergedMip);
-		if (tracks == null) { throw new AssertionError("Null tracks!"); }
-		if (tracks.size()==0) { 
-		    throw new AssertionError("Empty track list!"); 
-		} else if (tracks.size()==1) {
-		    // Unique
-		    Track tr = tracks.get(0);
-		    if (mergedMip.getCalorimeterHits().size() > 5) {
-			// Found a good MIP
-			System.out.println("DEBUG: Good pre-shower MIP with "+mergedMip.getCalorimeterHits().size()+" hits -- adding...");
-			mipsOld.add(mergedMip);
-			mips.add(mergedMip);
-			tracksMatchedToClusters.put(tr, mergedMip);
-			clustersMatchedToTracks.put(mergedMip, tracks);
-			preLinkedTracks.add(tr);
-		    } else {
-			// Didn't find a good mip
-			System.out.print("DEBUG: Dodgy pre-shower MIP with only "+mergedMip.getCalorimeterHits().size()+" hits -- not adding. Hits were:");
-			for (CalorimeterHit hit : mergedMip.getCalorimeterHits()) {
-			    System.out.print("  "+hit.getCellID());
-			}
-			System.out.println();
-			leftoverHitClusters.add(mergedMip);
-		    }
-		} else {
-		    // Overlap -- can't treat it as a MIP.
-		    System.out.println("DEBUG: Overlapping pre-shower MIP with "+mergedMip.getCalorimeterHits().size()+" hits matched to "+tracks.size()+" tracks.");
-		    treesWithNoStructure.add(mergedMip);
-		}
+	for (String mapName : m_inputTrackClusterMaps) {
+	    Map<Track,Cluster> currentMap = (Map<Track,Cluster>)(event.get(mapName));
+	    for (Track tr : currentMap.keySet()) {
+		// First, check we don't already have an assignment for this track
+		if (tr == null) { throw new AssertionError("Null track!"); }
+		if (tracksMatchedToClusters.get(tr) != null) { throw new AssertionError("Multiple entries for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()); }
+		// Now do the book-keeping
+		Cluster clus = currentMap.get(tr);
+		if (clus == null) { throw new AssertionError("Null cluster!"); }
+		tracksMatchedToClusters.put(tr, clus);
+		List<Track> tracksOfThisCluster = clustersMatchedToTracks.get(clus);
+		if (tracksOfThisCluster == null) { tracksOfThisCluster = new Vector<Track>(); clustersMatchedToTracks.put(clus, tracksOfThisCluster); }
+		tracksOfThisCluster.add(tr);		    
 	    }
 	}
-		
-
-	// For convenience, keep separate note of those tracks which were
-	// hard-linked to a MIP before and those which were not.
-	List<Track> nonPreLinkedTracks = new Vector<Track>();
-	nonPreLinkedTracks.addAll(trackList);
-	nonPreLinkedTracks.removeAll(preLinkedTracks);
 
 	// Handle photons
 	List<Cluster> chargedHadronLikePhotons = new Vector<Cluster>();
@@ -508,8 +312,7 @@
 	List<Cluster> photonLikePhotons = new Vector<Cluster>();
 	List<Cluster> electronClusters = new Vector<Cluster>();
 	List<Track>   electronTracks   = new Vector<Track>();
-	//photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, trackList, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
-	photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, nonPreLinkedTracks, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
+	photonHandling(photons, electronClusters, chargedHadronLikePhotons, modifiedPhotonClusters, photonLikePhotons, trackList, electronTracks, clustersMatchedToTracks, tracksMatchedToClusters);
 
 	// Resume track matching
 	List<Cluster> allMatchableClusters = new Vector<Cluster>();
@@ -524,7 +327,7 @@
 
 	if (m_debug) { System.out.println("Attempting to match "+allMatchableClusters.size()+" matchable clusters to "+trackList.size()+" tracks"); }
 	for (Track tr : trackList) {
-	    if (preLinkedTracks.contains(tr) || electronTracks.contains(tr)) {
+	    if (electronTracks.contains(tr)) {
 		continue; // Those are already assigned!
 	    }
 	    Cluster matchedCluster = m_trackClusterMatcher.matchTrackToCluster(tr, allMatchableClusters);
@@ -635,7 +438,7 @@
 	// Optionally, split photon seeds
 	List<Cluster> photonFragments = new Vector<Cluster>();
 	if (m_splitPhotonSeeds) {
-	    splitPhotonSeeds(clustersMatchedToTracks, tracksMatchedToClusters, modifiedPhotonClusters, electronClusters, photonLikePhotons, chargedHadronLikePhotons, photonFragments, mipsOld, mipsNew, mips, clumps, treeOfMip, treeOfClump, treeOfLeftoverHits, treesWithNoStructure);
+	    splitPhotonSeeds(clustersMatchedToTracks, tracksMatchedToClusters, modifiedPhotonClusters, electronClusters, photonLikePhotons, chargedHadronLikePhotons, photonFragments, mipsOld, mipsNew, mips, clumps, treesWithNoStructure);
 	}
 	// Unmatched tracks
 	List<Track> unmatchedTracks = new Vector<Track>();
@@ -2262,6 +2065,7 @@
 	}
     }
 
+    // FIXME: Only used in electron ID -- safe to delete?
     private double electronEnergyNormalizedResidual(Track tr, Cluster clus) {
 	double energyAssumingElectron = energy(clus, m_photonCalib);
 	double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
@@ -2274,6 +2078,7 @@
 	return (residual/estimatedError);
     }
 
+    // FIXME: Only used in electron ID -- safe to delete?
     private int countHitsInClusterInFirstLayers(Track tr, Cluster clus, int nLayers) {
 	Set<Long> allClusterHits = new HashSet<Long>();
 	for (CalorimeterHit hit : clus.getCalorimeterHits()) {
@@ -2292,6 +2097,7 @@
 	return countMatches;
     }
 
+    // FIXME: Only used in electron ID -- safe to delete?
     private int countHitsInCoreInFirstLayers(Track tr, Cluster clus, int nLayers) {
 	Set<Long> coreClusterHits = new HashSet<Long>();
 	for (CalorimeterHit hit : clus.getClusters().get(0).getCalorimeterHits()) {
@@ -2310,6 +2116,7 @@
 	return countMatches;
     }
 
+    // used in scanForPhotons -- no longer needed?
     private double impactParameterFromPhotonCoreToOrigin(Cluster clus) {
 	Cluster coreSubCluster = clus.getClusters().get(0);
 	BasicCluster copyOfCoreSubCluster = new BasicCluster();
@@ -2328,6 +2135,7 @@
 	return docaToOrigin;
     }
 
+    // FIXME: Only used in electron ID -- safe to delete?
     private double distanceFromTrackToPhotonCore(Track tr, Cluster clus) {
 	HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
 	Hep3Vector interceptPoint = null;
@@ -2354,6 +2162,7 @@
 	}
     }
 
+    // used in scanForPhotons -- no longer needed?
     private double radiusToCoverFractionOfPhoton(Cluster clus, double fraction) {
 	// Find core
 	Cluster coreSubCluster = clus.getClusters().get(0);
@@ -3079,7 +2888,7 @@
 	}
     }
 
-    void splitPhotonSeeds(Map<Cluster, List<Track>> clustersMatchedToTracks, Map<Track,Cluster> tracksMatchedToClusters, List<Cluster> modifiedPhotonClusters, List<Cluster> electronClusters, List<Cluster> photonLikePhotons, List<Cluster> chargedHadronLikePhotons, List<Cluster> photonFragments, List<Cluster> mipsOld, List<Cluster> mipsNew, List<Cluster> mips, List<Cluster> clumps, Map<Cluster, Cluster> treeOfMip, Map<Cluster, Cluster> treeOfClump, Map<Cluster, Cluster> treeOfLeftoverHits, List<Cluster> treesWithNoStructure) {
+    void splitPhotonSeeds(Map<Cluster, List<Track>> clustersMatchedToTracks, Map<Track,Cluster> tracksMatchedToClusters, List<Cluster> modifiedPhotonClusters, List<Cluster> electronClusters, List<Cluster> photonLikePhotons, List<Cluster> chargedHadronLikePhotons, List<Cluster> photonFragments, List<Cluster> mipsOld, List<Cluster> mipsNew, List<Cluster> mips, List<Cluster> clumps, List<Cluster> treesWithNoStructure) {
 	Set<Cluster> tmpCacheClusterSet = new HashSet<Cluster>();
 	tmpCacheClusterSet.addAll(clustersMatchedToTracks.keySet());
 	for (Cluster clus : tmpCacheClusterSet) {
@@ -3110,9 +2919,6 @@
 			    mips.addAll(mipClustersOld);
 			    mips.addAll(mipClustersNew);
 			    clumps.addAll(clumpClusters);
-			    for (Cluster mip : mipClustersOld) { treeOfMip.put(mip, null); } // hmm...
-			    for (Cluster mip : mipClustersNew) { treeOfMip.put(mip, null); } // hmm...
-			    for (Cluster clump : clumpClusters) { treeOfClump.put(clump, null); } // hmm...
 			    splitPhotonPieces_mip.addAll(mipClustersNew);
 			    splitPhotonPieces_mip.addAll(mipClustersOld);
 			    splitPhotonPieces_clump.addAll(clumpClusters);

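For reference, the new book-keeping block in process() above rebuilds the tree-to-target and shared-cluster maps from cell IDs instead of reading the per-subdetector MapTreeToTargets*/MapSharedToTree* collections: every hit of every target subcluster is indexed by its cell ID, and each tree then collects the targets whose cells it contains. A condensed standalone sketch of that indexing step (TreeToTargetIndexer is a hypothetical helper, not part of this commit; getCellID() and getCalorimeterHits() are used as in the driver):

    import java.util.*;
    import org.lcsim.event.CalorimeterHit;
    import org.lcsim.event.Cluster;

    // Hypothetical sketch of the cell-ID indexing used by the new book-keeping code.
    public class TreeToTargetIndexer {
        public Map<Cluster, List<Cluster>> index(Collection<Cluster> trees,
                                                 Collection<Cluster> targets) {
            // Each cell may belong to at most one target subcluster.
            Map<Long, Cluster> cellToTarget = new HashMap<Long, Cluster>();
            for (Cluster target : targets) {
                for (CalorimeterHit hit : target.getCalorimeterHits()) {
                    if (cellToTarget.put(hit.getCellID(), target) != null) {
                        throw new AssertionError("Cell shared by two targets");
                    }
                }
            }
            // A tree owns every target that contributes at least one of its cells.
            Map<Cluster, List<Cluster>> targetsInTree = new HashMap<Cluster, List<Cluster>>();
            for (Cluster tree : trees) {
                Set<Cluster> matched = new HashSet<Cluster>();
                for (CalorimeterHit hit : tree.getCalorimeterHits()) {
                    Cluster target = cellToTarget.get(hit.getCellID());
                    if (target != null) { matched.add(target); }
                }
                targetsInTree.put(tree, new Vector<Cluster>(matched));
            }
            return targetsInTree;
        }
    }

Building the maps from cell IDs inside the driver means they no longer need to be produced by FindSubClusters and shipped through the event.
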
lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.42 -> 1.43
diff -u -r1.42 -r1.43
--- ReclusterDriver.java	13 Oct 2008 06:33:13 -0000	1.42
+++ ReclusterDriver.java	15 Oct 2008 22:10:25 -0000	1.43
@@ -39,7 +39,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.42 2008/10/13 06:33:13 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.43 2008/10/15 22:10:25 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -2878,6 +2878,12 @@
 	    }
 	}
     }
+
+    protected MultipleTrackTrack makeMultipleTrackTrack(Collection<Track> tracks) {
+ 	MultipleTrackTrack trk = new MultipleTrackTrack(tracks);
+ 	return trk;
+    }
+
     protected class MultipleTrackTrack extends BaseTrack {
 	protected Collection<Track> m_tracks;
 	public MultipleTrackTrack(Collection<Track> tracks) {

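For reference, makeMultipleTrackTrack adds a small factory hook: by routing construction of the merged pseudo-track through a protected method rather than calling the constructor directly, a subclass can substitute or instrument the wrapper without touching the call sites. A minimal sketch of the pattern with hypothetical stand-in classes (Part in place of Track, MergedPart in place of MultipleTrackTrack):

    import java.util.Collection;

    // Hypothetical stand-ins for Track and MultipleTrackTrack.
    class Part {}

    class MergedPart extends Part {
        private final Collection<Part> m_parts;
        MergedPart(Collection<Part> parts) { m_parts = parts; }
    }

    class BaseReclusterSketch {
        // Call sites use the factory instead of "new MergedPart(...)" directly...
        protected MergedPart makeMergedPart(Collection<Part> parts) {
            return new MergedPart(parts);
        }
    }

    class LoggingReclusterSketch extends BaseReclusterSketch {
        // ...so a subclass can wrap or replace the construction step.
        @Override
        protected MergedPart makeMergedPart(Collection<Part> parts) {
            System.out.println("Merging " + parts.size() + " tracks into one pseudo-track");
            return super.makeMergedPart(parts);
        }
    }
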
lcsim/src/org/lcsim/contrib/uiowa
RemoveHitsFromClusters.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- RemoveHitsFromClusters.java	13 Oct 2008 07:16:15 -0000	1.1
+++ RemoveHitsFromClusters.java	15 Oct 2008 22:10:25 -0000	1.2
@@ -21,26 +21,6 @@
     }
 
     public void process(EventHeader event) {
-	{
-	    // DEBUG
-	    HitMap hitsEcalBarrel = (HitMap)(event.get("EcalBarrDigiHitMap"));
-	    HitMap hitsEcalEndcap = (HitMap)(event.get("EcalEndcapDigiHitMap"));
-	    HitMap hitsHcalBarrel = (HitMap)(event.get("HcalBarrDigiHitMap"));
-	    HitMap hitsHcalEndcap = (HitMap)(event.get("HcalEndcapDigiHitMap"));
-	    HitMap hitsMcalBarrel = (HitMap)(event.get("MuonBarrDigiHitMap"));
-	    HitMap hitsMcalEndcap = (HitMap)(event.get("MuonEndcapDigiHitMap"));
-	    System.out.println("DEBUG: hitsEcalBarrel contains "+hitsEcalBarrel.size()+" hits");
-	    System.out.println("DEBUG: hitsEcalEndcap contains "+hitsEcalEndcap.size()+" hits");
-	    System.out.println("DEBUG: hitsHcalBarrel contains "+hitsHcalBarrel.size()+" hits");
-	    System.out.println("DEBUG: hitsHcalEndcap contains "+hitsHcalEndcap.size()+" hits");
-	    System.out.println("DEBUG: hitsMcalBarrel contains "+hitsMcalBarrel.size()+" hits");
-	    System.out.println("DEBUG: hitsMcalEndcap contains "+hitsMcalEndcap.size()+" hits");
-	    HitMap test1 = (HitMap)(event.get("AllDigiHitMap"));
-	    System.out.println("DEBUG: AllDigiHitMap contains "+test1.size()+" hits");
-	    HitMap test2 = (HitMap)(event.get("RecoDigiHitMapWithoutMuons"));
-	    System.out.println("DEBUG: RecoDigiHitMapWithoutMuons contains "+test2.size()+" hits");
-	}
-
 	// Which hits will we remove?
 	Map<Track,Cluster> inputMap = (Map<Track,Cluster>)(event.get(m_inputMapName));
 	Set<CalorimeterHit> hitsToRemove = new HashSet<CalorimeterHit>();

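For reference, with the debug block removed the driver's remaining job (visible in the trailing context above) is to turn the named Track-to-Cluster map into the set of calorimeter hits to strip from the input cluster list. A minimal standalone sketch of that collection step (MatchedHitCollector is hypothetical, not part of this commit):

    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;
    import org.lcsim.event.CalorimeterHit;
    import org.lcsim.event.Cluster;
    import org.lcsim.event.Track;

    // Hypothetical sketch: gather every hit belonging to a cluster that is
    // matched to a track, so those hits can be removed from other clusters.
    class MatchedHitCollector {
        Set<CalorimeterHit> hitsToRemove(Map<Track, Cluster> trackToCluster) {
            Set<CalorimeterHit> hits = new HashSet<CalorimeterHit>();
            for (Cluster clus : trackToCluster.values()) {
                hits.addAll(clus.getCalorimeterHits());
            }
            return hits;
        }
    }
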
lcsim/src/org/lcsim/contrib/uiowa
SetUpDTreeForReclustering.java 1.10 -> 1.11
diff -u -r1.10 -r1.11
--- SetUpDTreeForReclustering.java	13 Oct 2008 06:34:03 -0000	1.10
+++ SetUpDTreeForReclustering.java	15 Oct 2008 22:10:25 -0000	1.11
@@ -1,6 +1,7 @@
 package org.lcsim.contrib.uiowa;
 
 import java.util.*;
+import hep.physics.vec.*;
 import org.lcsim.util.Driver;
 import org.lcsim.event.EventHeader;
 import org.lcsim.util.*;
@@ -54,20 +55,6 @@
 	}
 	add(combineReco);
 
-	// Find muons
-	MuonFinderWrapper muonFinder = new MuonFinderWrapper("FSReconTracks", "AllDigiHitMap", "MuonTrackClusterMap", "AllDigiHitMapWithoutMuons", "FSReconTracksWithoutMuons");
-	muonFinder.skip(); // TEST
-	add(muonFinder);
-	add(new HitMapSubtractDriver("RecoDigiHitMap", "AllDigiHitMapWithoutMuons", "RecoDigiHitMapMuons")); // Identify the muon hits within useable hit block
-	add(new HitMapSubtractDriver("RecoDigiHitMap", "RecoDigiHitMapMuons", "RecoDigiHitMapWithoutMuons")); // Non-muon hits within useable hit block
-
-	// TJ's pre-shower MIP-finder
-	ShowerPointFinderDriver showerFinder = new ShowerPointFinderDriver(findCluster, "RecoDigiHitMapWithoutMuons", "FSReconTracksWithoutMuons", "ShowerFinderMapTrackToMip", "RecoDigiHitMapWithoutMuonsOrMips", "ShowerFinderMips");
-	add(showerFinder);
-	add(new CheckDisjoint("RecoDigiHitMapWithoutMuonsOrMips", "ShowerFinderMips"));
-	// Steve's pre-shower MIP-finder
-	add(new SteveMipWrapper());
-
 	// Find photons in ECAL
 	{
 	    // We have to use ALL hits for the photon-finder -- this is because it uses the
@@ -76,15 +63,59 @@
 	    add(photonFinder);
 	    add(new TransientFlagDriver("PreliminaryPhotonClustersForDTree"));
 
+	    // Check for electrons and set those to one side so we don't accidentally cluster them or use their tracks:
+	    add(new TrackToElectronMapMaker(findCluster, "PreliminaryPhotonClustersForDTree", "FSReconTracks", "MapElectronTracksToClusters", "TracksWithoutElectrons", "ElectronMapClusters"));
+	    add(new ListSubtractDriver("PreliminaryPhotonClustersForDTree", "ElectronMapClusters", "PreliminaryPhotonClustersForDTreeMinusElectrons"));
+	    add(new ClusterListToHitMapDriver("ElectronMapClusters", "ElectronHitMap"));
+	    add(new HitMapSubtractDriver("RecoDigiHitMap", "ElectronHitMap", "RecoDigiHitMapWithoutElectrons"));
+	    add(new HitMapSubtractDriver("AllDigiHitMap", "ElectronHitMap", "AllDigiHitMapWithoutElectrons"));
+	}
+
+
+	{
+	    // Find muons
+	    MuonFinderWrapper muonFinder = new MuonFinderWrapper("TracksWithoutElectrons", "AllDigiHitMapWithoutElectrons", "MuonTrackClusterMap", "AllDigiHitMapWithoutElectronsOrMuons", "TracksWithoutElectronsOrMuons");
+	    //muonFinder.skip(); // TEST
+	    add(muonFinder);
+	    // Identify the muon hits within useable hit block
+	    add(new HitMapSubtractDriver("RecoDigiHitMapWithoutElectrons", "AllDigiHitMapWithoutElectronsOrMuons", "RecoDigiHitMapMuons"));
+	    // Non-muon hits within useable hit block
+	    add(new HitMapSubtractDriver("RecoDigiHitMapWithoutElectrons", "RecoDigiHitMapMuons", "RecoDigiHitMapWithoutElectronsOrMuons"));
+	}
+
+	{
+	    // TJ's pre-shower MIP-finder
+	    ShowerPointFinderDriver showerFinder = new ShowerPointFinderDriver(findCluster, "RecoDigiHitMapWithoutElectronsOrMuons", "TracksWithoutElectronsOrMuons", "ShowerFinderMapTrackToMip", "RecoDigiHitMapWithoutElectronsOrMuonsOrMips", "ShowerFinderMips");
+	    add(showerFinder);
+	    add(new CheckDisjoint("RecoDigiHitMapWithoutElectronsOrMuonsOrMips", "ShowerFinderMips"));
+	    // Steve's pre-shower MIP-finder
+	    add(new SteveMipWrapper());
+
+	    // Match tracks -> pre-shower MIPs (best possible linkage)
+	    add(new TrackToPreShowerMipMapMaker("ShowerFinderMapTrackToMip", "TracksWithoutElectronsOrMuons", "MapPreShowerMipTracksToClusterSeeds", "UnmatchedTracksAfterPreShowerMipMap", "PreShowerMipMatchMipClusters", "PreShowerMipMatchSmallClusters", "PreShowerMipMatchBlockClusters"));
+	    // At this point, we COULD take the unlinked MIP hits back out and recycle them
+	    // into the DTree clustering. (The unlinked MIPs are the ones where the MIP
+	    // clusters of 2+ tracks overlapped, or where there were too few hits, or both.)
+	    add(new ClusterListToHitMapDriver("PreShowerMipMatchSmallClusters", "PreShowerMipMatchSmallClusterHits"));
+	    add(new ClusterListToHitMapDriver("PreShowerMipMatchBlockClusters", "PreShowerMipMatchBlockClusterHits"));
+	    HitMapAddDriver remergeBadMips = new HitMapAddDriver();
+	    remergeBadMips.addInputHitMap("PreShowerMipMatchSmallClusterHits");
+	    remergeBadMips.addInputHitMap("PreShowerMipMatchBlockClusterHits");
+	    remergeBadMips.addInputHitMap("RecoDigiHitMapWithoutElectronsOrMuonsOrMips");
+	    remergeBadMips.setOutputHitMap("RecoDigiHitMapWithoutElectronsOrMuonsOrGoodMips");
+	    add(remergeBadMips); // TEST
+	}
+
+	{
 	    // Now go back and ensure that no photon uses hits from a charged particle's MIP.
 	    // We can either remove those individual hits or veto entire clusters.
 	    boolean remove = false;
 	    boolean findVetoedPhotons = true;
 	    if (remove) {
-		add(new RemoveHitsFromClusters("PreliminaryPhotonClustersForDTree", "MuonTrackClusterMap", "PhotonsMinusMuonHits"));
+		add(new RemoveHitsFromClusters("PreliminaryPhotonClustersForDTreeMinusElectrons", "MuonTrackClusterMap", "PhotonsMinusMuonHits"));
 		add(new RemoveHitsFromClusters("PhotonsMinusMuonHits", "ShowerFinderMapTrackToMip", "PhotonClustersForDTree"));
 	    } else {
-		add(new VetoHitsFromClusters("PreliminaryPhotonClustersForDTree", "MuonTrackClusterMap", "PhotonsMinusMuonHits"));
+		add(new VetoHitsFromClusters("PreliminaryPhotonClustersForDTreeMinusElectrons", "MuonTrackClusterMap", "PhotonsMinusMuonHits"));
 		add(new VetoHitsFromClusters("PhotonsMinusMuonHits", "ShowerFinderMapTrackToMip", "PhotonClustersForDTree"));
 		if (findVetoedPhotons) {
 		    // Test: pick out vetoed clusters
@@ -98,19 +129,19 @@
 	    add(new TransientFlagDriver("PhotonsMinusMuonHits"));
 	    add(new TransientFlagDriver("PhotonClustersForDTree"));
 
-	    // Identify which hits were used for photons
+	    // Identify which hits were used for photons, electrons
 	    add(new ClusterListToHitMapDriver("PhotonClustersForDTree", "PhotonHitMap"));
 	    add(new CheckDisjoint("ShowerFinderMips", "PhotonHitMap"));
 
 	    // Identify remaining hits not used for photons or muons or mips
 	    if (!remove && findVetoedPhotons) {
-		add(new HitMapSubtractDriver("RecoDigiHitMapWithoutMuonsOrMips", "PhotonHitMap", "TmpRecoDigiHitMapWithoutMuonsOrMipsOrPhotons"));
-		add(new HitMapSubtractDriver("TmpRecoDigiHitMapWithoutMuonsOrMipsOrPhotons", "PhotonVetoHitMap", "RecoDigiHitMapWithoutMuonsOrMipsOrPhotons"));
+		add(new HitMapSubtractDriver("RecoDigiHitMapWithoutElectronsOrMuonsOrGoodMips", "PhotonHitMap", "TmpRecoDigiHitMapWithoutElectronsOrMuonsOrMipsOrPhotons"));
+		add(new HitMapSubtractDriver("TmpRecoDigiHitMapWithoutElectronsOrMuonsOrMipsOrPhotons", "PhotonVetoHitMap", "RecoDigiHitMapWithoutElectronsOrMuonsOrMipsOrPhotons"));
 	    } else {
-		add(new HitMapSubtractDriver("RecoDigiHitMapWithoutMuonsOrMips", "PhotonHitMap", "RecoDigiHitMapWithoutMuonsOrMipsOrPhotons")); // Remove Muon/MIP hits
+		add(new HitMapSubtractDriver("RecoDigiHitMapWithoutElectronsOrMuonsOrGoodMips", "PhotonHitMap", "RecoDigiHitMapWithoutElectronsOrMuonsOrMipsOrPhotons")); // Remove Muon/MIP hits
 		//add(new HitMapSubtractDriver("RecoDigiHitMap", "PhotonHitMap", "RecoDigiHitMapWithoutMuonsOrMipsOrPhotons")); // Keep all non-photon hits
 	    }
-	    add(new CheckDisjoint("RecoDigiHitMapWithoutMuonsOrMipsOrPhotons", "PhotonHitMap"));
+	    add(new CheckDisjoint("RecoDigiHitMapWithoutElectronsOrMuonsOrMipsOrPhotons", "PhotonHitMap"));
 	}
 
 	// Run DTree on each subdetector separately:
@@ -122,7 +153,7 @@
 	    String outputClusterListName = new String(rawInputHitMapName+"Clusters");
 	    mapInputListToDTreeClusterList.put(rawInputName, outputClusterListName);
 	    // Filter hitmap to only contain hits from this subdet that are not muon/mip/photon hits
-	    add(new HitMapAndDriver(rawInputHitMapName, "RecoDigiHitMapWithoutMuonsOrMipsOrPhotons", filteredInputHitMapName));
+	    add(new HitMapAndDriver(rawInputHitMapName, "RecoDigiHitMapWithoutElectronsOrMuonsOrMipsOrPhotons", filteredInputHitMapName));
 	    if (hitsForMST.contains(rawInputName)) {
 		// Run MST
 		org.lcsim.recon.cluster.mst.MSTClusterDriver mstDriver = new org.lcsim.recon.cluster.mst.MSTClusterDriver(outputHitMapName, outputClusterListName);
@@ -172,5 +203,76 @@
 	add(new TransientFlagDriver("DTreeClustersMCAL"));
 	add(new TransientFlagDriver("DTreeClusters"));
 
+	// OK, now go back and look for hits near boundaries (for use when making MIPs
+	// with a layer-based algorithm so it can cross from endcap to barrel).
+	{
+	    HitNearBarrelEndcapBoundaryDecision dec = new HitNearBarrelEndcapBoundaryDecision(6.0, 15.0, 1);
+	    add(dec);
+	    add(new ListFilterDriver(dec, "EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
+	    add(new ListFilterDriver(dec, "HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", CalorimeterHit.class));
+	    add(new ListFilterDriver(dec, "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
+	    add(new ListFilterDriver(dec, "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary", CalorimeterHit.class));
+	    add(new TransientFlagDriver("EcalBarrDigiHitsNearBoundary"));
+	    add(new TransientFlagDriver("HcalBarrDigiHitsNearBoundary"));
+	    add(new TransientFlagDriver("EcalEndcapDigiHitsNearBoundary"));
+	    add(new TransientFlagDriver("HcalEndcapDigiHitsNearBoundary"));
+	}
+
+	// Look for substructure inside clusters:
+	{
+	    int m_minHitsToBeTreatedAsClusterECAL = 15;
+	    int m_minHitsToBeTreatedAsClusterHCAL = 20;
+	    int m_minHitsToBeTreatedAsClusterMCAL = 5;
+	    int m_minHitsToBeTreatedAsClusterFCAL = m_minHitsToBeTreatedAsClusterECAL;
+	    double m_newMipFinderRadiusECAL = 20.0;
+	    double m_newMipFinderRadiusHCAL = 50.0;
+	    double m_newMipFinderRadiusMCAL = 100.0;
+	    double m_newMipFinderRadiusFCAL = m_newMipFinderRadiusECAL;
+	    boolean m_removePoorQualityMips = false;
+
+	    FindSubClusters clusDriverECAL = new FindSubClusters("DTreeClustersECAL", m_newMipFinderRadiusECAL, m_minHitsToBeTreatedAsClusterECAL, m_removePoorQualityMips, "OldMipsInsideTreesECAL", "NewMipsInsideTreesECAL", "ClumpsInsideTreesECAL", "BlocksInsideTreesECAL", "LeftoverHitsInsideTreesECAL", "MapTreeToTargetsECAL", "MapSharedToTreeECAL");
+	    FindSubClusters clusDriverHCAL = new FindSubClusters("DTreeClustersHCAL", m_newMipFinderRadiusHCAL, m_minHitsToBeTreatedAsClusterHCAL, m_removePoorQualityMips, "OldMipsInsideTreesHCAL", "NewMipsInsideTreesHCAL", "ClumpsInsideTreesHCAL", "BlocksInsideTreesHCAL", "LeftoverHitsInsideTreesHCAL", "MapTreeToTargetsHCAL", "MapSharedToTreeHCAL");
+	    FindSubClusters clusDriverMCAL = new FindSubClusters("DTreeClustersMCAL", m_newMipFinderRadiusMCAL, m_minHitsToBeTreatedAsClusterMCAL, m_removePoorQualityMips, "OldMipsInsideTreesMCAL", "NewMipsInsideTreesMCAL", "ClumpsInsideTreesMCAL", "BlocksInsideTreesMCAL", "LeftoverHitsInsideTreesMCAL", "MapTreeToTargetsMCAL", "MapSharedToTreeMCAL");
+	    //FindSubClusters clusDriverFCAL = new FindSubClusters("DTreeClustersFCAL", m_newMipFinderRadiusFCAL, m_minHitsToBeTreatedAsClusterFCAL, m_removePoorQualityMips, "OldMipsInsideTreesFCAL", "NewMipsInsideTreesFCAL", "ClumpsInsideTreesFCAL", "BlocksInsideTreesFCAL", "LeftoverHitsInsideTreesFCAL", "MapTreeToTargetsFCAL", "MapSharedToTreeFCAL");
+	    clusDriverECAL.enableBarrelEndcapCrossing("EcalBarrDigiHits", "EcalBarrDigiHitsNearBoundary", "EcalEndcapDigiHits", "EcalEndcapDigiHitsNearBoundary");
+	    clusDriverHCAL.enableBarrelEndcapCrossing("HcalBarrDigiHits", "HcalBarrDigiHitsNearBoundary", "HcalEndcapDigiHits", "HcalEndcapDigiHitsNearBoundary");
+	    clusDriverECAL.setNNrange(1,1,1);
+	    clusDriverHCAL.setNNrange(2,2,1);
+	    clusDriverMCAL.setNNrange(2,2,1);
+	    //clusDriverFCAL.setNNrange(1,1,1);
+	    add(clusDriverECAL);
+	    add(clusDriverHCAL);
+	    add(clusDriverMCAL);
+	}
+
+	// Track-cluster matching in steps:
+	// Already done electrons, muons, and good pre-shower MIPs.
+	// Now take a go at the rest...
+
+	// Match tracks to MIP clusters in ECAL (ambiguities not allowed)
+	TrackToMipClusterMapMaker mipMapMaker = new TrackToMipClusterMapMaker(findCluster, "UnmatchedTracksAfterPreShowerMipMap", "MapMipClusterTracksToClusterSeeds", "UnmatchedTracksAfterMipClusterMap");
+	mipMapMaker.addInputList("OldMipsInsideTreesECAL", "MipMatchedOldMipsInsideTreesECAL", "UnmatchedOldMipsInsideTreesECALAfterMipMatch");
+	mipMapMaker.addInputList("NewMipsInsideTreesECAL", "MipMatchedNewMipsInsideTreesECAL", "UnmatchedNewMipsInsideTreesECALAfterMipMatch");
+	add(mipMapMaker);
+	// Match tracks to generic clusters in ECAL
+	TrackToGenericClusterMapMaker genMapMaker = new TrackToGenericClusterMapMaker(findCluster, "UnmatchedTracksAfterMipClusterMap", "MapGenClusterTracksToClusterSeeds", "UnmatchedTracksAfterGenClusterMap");
+	genMapMaker.addInputList("UnmatchedOldMipsInsideTreesECALAfterMipMatch", "GenMatchedOldMipsInsideTreesECAL", "UnmatchedOldMipsInsideTreesECALAfterGenMatch");
+	genMapMaker.addInputList("UnmatchedNewMipsInsideTreesECALAfterMipMatch", "GenMatchedNewMipsInsideTreesECAL", "UnmatchedNewMipsInsideTreesECALAfterGenMatch");
+	genMapMaker.addInputList("ClumpsInsideTreesECAL", "GenMatchedClumpsInsideTreesECAL", "UnmatchedClumpsInsideTreesECALAfterGenMatch");
+	genMapMaker.addInputList("BlocksInsideTreesECAL", "GenMatchedBlocksInsideTreesECAL", "UnmatchedBlocksInsideTreesECALAfterGenMatch");
+	genMapMaker.addInputList("LeftoverHitsInsideTreesECAL", "GenMatchedLeftoverHitsInsideTreesECAL", "UnmatchedLeftoverHitsInsideTreesECALAfterGenMatch");
+	genMapMaker.addInputList("PhotonClustersForDTree", "GenMatchedPhotonClustersForDTree", "UnmatchedPhotonClustersForDTreeAfterGenMatch");
+	add(genMapMaker);
+	// [here: can split photon seeds, large seeds, etc...]
+	// Remaining tracks have either NO match or an ambiguous one (i.e. >1 track to the same cluster)
+	AmbiguousTrackToClusterMapMaker ambiguousMapMaker = new AmbiguousTrackToClusterMapMaker(findCluster, "UnmatchedTracksAfterGenClusterMap", "MapAmbigClusterTracksToClusterSeeds", "UnmatchedTracksAfterAmbigClusterMap");
+	ambiguousMapMaker.addInputList("UnmatchedOldMipsInsideTreesECALAfterGenMatch", "AmbigMatchedOldMipsInsideTreesECAL", "UnmatchedOldMipsInsideTreesECALAfterAmbigMatch");
+	ambiguousMapMaker.addInputList("UnmatchedNewMipsInsideTreesECALAfterGenMatch", "AmbigMatchedNewMipsInsideTreesECAL", "UnmatchedNewMipsInsideTreesECALAfterAmbigMatch");
+	ambiguousMapMaker.addInputList("UnmatchedClumpsInsideTreesECALAfterGenMatch", "AmbigMatchedClumpsInsideTreesECAL", "UnmatchedClumpsInsideTreesECALAfterAmbigMatch");
+	ambiguousMapMaker.addInputList("UnmatchedBlocksInsideTreesECALAfterGenMatch", "AmbigMatchedBlocksInsideTreesECAL", "UnmatchedBlocksInsideTreesECALAfterAmbigMatch");
+	ambiguousMapMaker.addInputList("UnmatchedLeftoverHitsInsideTreesECALAfterGenMatch", "AmbigMatchedLeftoverHitsInsideTreesECAL", "UnmatchedLeftoverHitsInsideTreesECALAfterAmbigMatch");
+	ambiguousMapMaker.addInputList("UnmatchedPhotonClustersForDTreeAfterGenMatch", "AmbigMatchedPhotonClustersForDTree", "UnmatchedPhotonClustersForDTreeAfterAmbigMatch");
+	add(ambiguousMapMaker);
+	// [here: can try to split large seeds with >1 track]
     }
 }
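
For reference, the refactored setup above chains the selection stages by repeated hit-map subtraction, with each stage publishing its output under a cumulative name (RecoDigiHitMapWithoutElectrons, ...WithoutElectronsOrMuons, and so on). The core operation behind those HitMapSubtractDriver calls can be sketched standalone, assuming the hit maps are keyed by cell ID (HitMapSubtract below is hypothetical, not the contrib driver):

    import java.util.HashMap;
    import java.util.Map;
    import org.lcsim.event.CalorimeterHit;

    // Hypothetical sketch of a hit-map subtraction: keep every (cellID -> hit)
    // entry of the input whose cell ID does not appear in the map being removed.
    final class HitMapSubtract {
        static Map<Long, CalorimeterHit> subtract(Map<Long, CalorimeterHit> input,
                                                  Map<Long, CalorimeterHit> toRemove) {
            Map<Long, CalorimeterHit> out = new HashMap<Long, CalorimeterHit>(input);
            out.keySet().removeAll(toRemove.keySet());
            return out;
        }
    }

Because each stage consumes the previous stage's output map, the final RecoDigiHitMapWithoutElectronsOrMuonsOrMipsOrPhotons is disjoint from the categories removed along the way, which the CheckDisjoint drivers above spot-check.
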
CVSspam 0.2.8