Print

Print


Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReassignClustersAlgorithm.java+10added 1.1
ConeReassignmentAlgorithm.java+31added 1.1
ReclusterDTreeDriver.java+126-3531.22 -> 1.23
+167-353
2 added + 1 modified, total 3 files
MJC: (contrib) Refactor cluster reassignment algorithm

lcsim/src/org/lcsim/contrib/uiowa
ReassignClustersAlgorithm.java added at 1.1
diff -N ReassignClustersAlgorithm.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ReassignClustersAlgorithm.java	18 Jun 2008 18:29:50 -0000	1.1
@@ -0,0 +1,10 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*; 
+import org.lcsim.util.*;
+import org.lcsim.event.util.*;
+import org.lcsim.event.*;
+
+public interface ReassignClustersAlgorithm {
+    public Double computeFigureOfMerit(Track tr, Cluster clus);
+}

lcsim/src/org/lcsim/contrib/uiowa
ConeReassignmentAlgorithm.java added at 1.1
diff -N ConeReassignmentAlgorithm.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ConeReassignmentAlgorithm.java	18 Jun 2008 18:29:50 -0000	1.1
@@ -0,0 +1,31 @@
+package org.lcsim.contrib.uiowa;
+
+import java.util.*; 
+import org.lcsim.util.*;
+import org.lcsim.event.util.*;
+import org.lcsim.event.*;
+import hep.physics.vec.*;
+
+public class ConeReassignmentAlgorithm implements ReassignClustersAlgorithm {
+    protected double m_limit;
+    protected LocalHelixExtrapolator m_findCluster;
+    public ConeReassignmentAlgorithm(double limit, LocalHelixExtrapolator findCluster) {
+	m_limit = limit;
+	m_findCluster = findCluster;
+    }
+    public Double computeFigureOfMerit(Track tr, Cluster clus) {
+	Hep3Vector interceptPoint = m_findCluster.performExtrapolation(tr);
+	if (interceptPoint != null) {
+	    Hep3Vector tangent = m_findCluster.getTangent();
+	    Hep3Vector tangentUnit = VecOp.unit(tangent);
+	    Hep3Vector clusterPosition = new BasicHep3Vector(clus.getPosition());
+	    Hep3Vector displacement = VecOp.sub(clusterPosition, interceptPoint);
+	    Hep3Vector displacementUnit = VecOp.unit(displacement);
+	    double angle=Math.acos(VecOp.dot(tangentUnit, displacementUnit));
+	    if (angle < m_limit) {
+		return new Double(angle);
+	    }
+	}
+	return null;
+    }
+}

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.22 -> 1.23
diff -u -r1.22 -r1.23
--- ReclusterDTreeDriver.java	13 Jun 2008 22:46:38 -0000	1.22
+++ ReclusterDTreeDriver.java	18 Jun 2008 18:29:50 -0000	1.23
@@ -34,7 +34,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.22 2008/06/13 22:46:38 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.23 2008/06/18 18:29:50 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -515,294 +515,39 @@
 	applyOverrides(linkableClusters, photonLikePhotons, electronClusters, tracksMatchedToClusters, newMapShowerComponentToTrack, newMapTrackToShowerComponents, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, newMapTrackToVetoedAdditions, newMapTrackToTolerance, allSharedClusters);
 
 	// Book-keeping for cone-adding
+	boolean m_usePhotonsForConeTrackFix = false;
 	Set<Cluster> unassignedClusters = new HashSet<Cluster>();
 	for (Cluster clus : linkableClusters) {
 	    Track matchedTrack = newMapShowerComponentToTrack.get(clus);
 	    Set<Track> matchedJet = newMapShowerComponentToJet.get(clus);
 	    if (matchedTrack == null && matchedJet == null) {
-		unassignedClusters.add(clus);
+		if (modifiedPhotonClusters.contains(clus) && !m_usePhotonsForConeTrackFix) {
+		    // This is a photon and we aren't using them for the cone -- ignore it
+		} else {
+		    unassignedClusters.add(clus);
+		}
 	    }
 	}
 	boolean m_fixSingleTracksWithCone = true;
 	boolean m_fixJetsWithCone = true;
+	LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
+	findCluster.process(m_event); // picks up geometry
+	ReassignClustersAlgorithm algorithm = new ConeReassignmentAlgorithm(1.0, findCluster);
 	if (m_fixSingleTracksWithCone) {
 	    // First, try to fix the simplest case: single tracks with E/p < 1
 	    for (Track tr : tracksSortedByMomentum) {
-		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
-		if (mapTrackToJet.get(tr) != null) {
-		    // From a jet -- ignore
-		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since it comes from a jet");
-		    continue;
-		}
-		Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
-		boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
-		if (punchThrough) {
-		    // Punches through -- no E/p info -- ignore
-		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since it punches through");
-		    continue;
-		}
-		double showerEnergy = energy(showerComponents, allSharedClusters);
-		double toleranceOfTrack = newMapTrackToTolerance.get(tr);
-		boolean passesEoverP = testEoverP_twoSided(showerEnergy, trackMomentum, toleranceOfTrack);
-		if (passesEoverP) {
-		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since it passes E/p (E="+showerEnergy+")");
-		    // Passes E/p check -- ignore
-		    continue;
-		} else if (showerEnergy > trackMomentum) {
-		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since it E>>p (E="+showerEnergy+")");
-		    // E>>p -- ignore
-		    continue;
-		}
-		// OK. This single track needs more energy.
-		System.out.println("DEBUG: Handling track with p="+trackMomentum+" since it fails E/p (E="+showerEnergy+")...");
-		double limit = 1.0; // arbitrary
-		List<Cluster> reassignedClusters = reassignClustersToTrack(tr, showerComponents, unassignedClusters, allSharedClusters, toleranceOfTrack, limit);
-		if (reassignedClusters != null && reassignedClusters.size()>0) {
-		    for (Cluster clus : reassignedClusters) {
-			showerComponents.add(clus);
-			unassignedClusters.remove(clus);
-			newMapShowerComponentToTrack.put(clus, tr);
-		    }
-		}
-		double newShowerEnergy = energy(showerComponents, allSharedClusters);
-		boolean newPassesEoverP = testEoverP_twoSided(newShowerEnergy, trackMomentum, toleranceOfTrack);
-		boolean newPunchThrough = isPunchThrough(showerComponents, allSharedClusters);
-		System.out.println("DEBUG: After handling track with p="+trackMomentum+", new E = "+newShowerEnergy+" => new E/p="+(newShowerEnergy/trackMomentum)+" => pass="+newPassesEoverP+"; punch-through="+newPunchThrough);
+		checkTrackForReassignments(tr, newMapTrackToShowerComponents, newMapShowerComponentToTrack, allSharedClusters, unassignedClusters, newMapTrackToTolerance.get(tr), algorithm);
 	    }
 	}
 	if (m_fixJetsWithCone) {
 	    for (Set<Track> jet : jets) {
-		Set<Cluster> showerComponents = newMapJetToShowerComponents.get(jet);
-		boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
-		double jetMomentum = jetScalarMomentum(jet);
-		if (punchThrough) {
-		    // Punches through -- no E/p info -- ignore
-		    System.out.println("DEBUG: Ignoring jet with total p="+jetMomentum+" since it punches through");
-		    continue;
-		}
-		double jetEnergy = energy(showerComponents, allSharedClusters);
-		boolean passesEoverP = testEoverP_twoSided(jetEnergy, jetMomentum, m_jetTolerance);
-		if (passesEoverP) {
-		    System.out.println("DEBUG: Ignoring jet with total p="+jetMomentum+" since it passes E/p (E="+jetEnergy+")");
-		    // Passes E/p check -- ignore
-		    continue;
-		} else if (jetEnergy > jetMomentum) {
-		    System.out.println("DEBUG: Ignoring jet with p="+jetMomentum+" since it E>>p (E="+jetEnergy+")");
-		    // E>>p -- ignore
-		    continue;
-		}
-		// OK. This jet needs more energy
-		System.out.println("DEBUG: Handling jet with total p="+jetMomentum+" since it fails E/p (E="+jetEnergy+")...");
-		double limit = 1.0; // arbitrary
-		List<Cluster> reassignedClusters = reassignClustersToJet(jet, showerComponents, unassignedClusters, allSharedClusters, m_jetTolerance, limit);
-		if (reassignedClusters != null && reassignedClusters.size()>0) {
-		    for (Cluster clus : reassignedClusters) {
-			showerComponents.add(clus);
-			unassignedClusters.remove(clus);
-			newMapShowerComponentToJet.put(clus, jet);
-		    }
-		}
-		double newJetEnergy = energy(showerComponents, allSharedClusters);
-		boolean newPassesEoverP = testEoverP_twoSided(newJetEnergy, jetMomentum, m_jetTolerance);
-		boolean newPunchThrough = isPunchThrough(showerComponents, allSharedClusters);
-		System.out.println("DEBUG: After handling jet with total p="+jetMomentum+", new E = "+newJetEnergy+" => new E/p="+(newJetEnergy/jetMomentum)+" => pass="+newPassesEoverP+"; punch-through="+newPunchThrough);
-	    }
-	}
-
-	/*
-
-	// Taejeong's code below
-
-      	// HERE
-	// 1) Look for particles with E/p << 1 that don't punch through [see updateScoring() code]
-	// 2) Look for non-assigned clusters & hits
-	// 3) Check position of non-assigned clusters/hits -- are they downstream of track?
-	// 4) Try to assign clusters/hits with the right position and energy to balance E/p
-
-	// Example (Print out the momentum and energy)
-	System.out.println("New event starting...");
-	System.out.println();
-	double anglecut = 0.3;
-	double EoverPcut = 0.2;
-    for (Track tr : tracksSortedByMomentum) {
-    	Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
-        double clusterEnergy = energy(showerComponents, allSharedClusters);
-        double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
-        double EoverP = clusterEnergy/trackMomentum;
-        boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);  
-        //print P and E
-        System.out.print("P = " + trackMomentum);
-        System.out.print("	E = " + clusterEnergy);
-        System.out.println("	E/P = " + EoverP);
-               
-        LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
-        findCluster.process(event); // picks up geometry
-        Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
-        Hep3Vector tangent = findCluster.getTangent();
-		
-        if(interceptPoint==null) {
-			System.out.println("This track has no interceptpoint");
-			continue;
+		checkJetForReassignments(jet, newMapJetToShowerComponents, newMapShowerComponentToJet, allSharedClusters, unassignedClusters, algorithm);
 	    }
-		
-        boolean debug = false;
-        // convert to theta and phi from x,y,z position
-        double intX = interceptPoint.x();
-        double intY = interceptPoint.y();
-        double intZ = interceptPoint.z();  
-        double tanX = tangent.x();
-        double tanY = tangent.y();
-        double tanZ = tangent.z();
-		double momZ = tr.getMomentum()[2];
-		if (momZ * tanZ < 0.0) {
-		    tanX = -tanX;
-		    tanY = -tanY;
-		    tanZ = -tanZ;
-		    tangent = new BasicHep3Vector(tanX, tanY, tanZ);
-		    //System.out.println("WARNING: Flipped a track tangent direction");
-		}
-
-        if(debug){
-            System.out.println("intx= "+intX+" inty= "+intY+" intZ= "+intZ);
-            System.out.println("tanx= "+tanX+" tany= "+tanY+" tanZ= "+tanZ);
-        }
-              
-        if (mapTrackToJet.get(tr) != null) {
-        	// Treat as jet
-        	Set<Track> jet = mapTrackToJet.get(tr);
-        	Set<Cluster> jetComponents = newMapJetToShowerComponents.get(jet);
-            double jetEnergy = energy(jetComponents, allSharedClusters);
-            double jetMomentumMag = jetScalarMomentum(jet);
-            System.out.println("      from jet with E="+jetEnergy+" and P="+jetMomentumMag);
-        } else {
-        		if ( EoverP < EoverPcut && !punchThrough ) {
-                    System.out.println("This track fails EoverP and also not puchthrough! need to reassign clusters");
-                    //Linkable clusters which are not assigned!
-                	List<Cluster> reassignedLinkable = reassignClustersToTrack(interceptPoint, tangent, linkableClusters , anglecut, newMapShowerComponentToTrack, allSharedClusters, clusterEnergy, trackMomentum);
-                	for(Cluster clus : reassignedLinkable) {
-                		double reassignedClusterEnergy = energy(clus, allSharedClusters);      
-                    	System.out.println("a downstrem of this track with "+reassignedClusterEnergy +" GeV");  
-                	}
-                    Set<Cluster> newShowerComponents = new HashSet<Cluster>();
-					newShowerComponents.addAll(showerComponents);
-					newShowerComponents.addAll(reassignedLinkable);
-					double newEnergy = energy(newShowerComponents, allSharedClusters);
-				    System.out.println("This track: clusterE=  " + clusterEnergy+ ", P= " + trackMomentum+ " now E= "+newEnergy);
-                	// Shared hits?
-                	//  -> smallClustersToShare
-                	//  -> leftoverHitClustersToShare
-                	// How much energy from "shared hits" in this cone?
-                	List<Cluster> bothListsOfSharedClusters = new Vector<Cluster>();
-                	bothListsOfSharedClusters.addAll(smallClustersToShare);
-                	bothListsOfSharedClusters.addAll(leftoverHitClustersToShare);
-                	// Look for which are in cone
-                	List<Cluster> sharedClustersInCone = reassignClustersToTrack(interceptPoint, tangent, bothListsOfSharedClusters , anglecut, newMapShowerComponentToTrack, allSharedClusters, clusterEnergy, trackMomentum);
-                	Cluster clusterOfSharedHitsInCone = makeCombinedCluster(sharedClustersInCone);
-                	double energyOfSharedHitsInCone = energy(clusterOfSharedHitsInCone);
-                	System.out.println(clusterOfSharedHitsInCone.getCalorimeterHits().size()+" shared hits from "+sharedClustersInCone.size()+" clusters in cone give total of "+energyOfSharedHitsInCone+" GeV");
-
-                	// All hits!
-    		    	List<Cluster> allHitsInEvent = new Vector<Cluster>();
-					for (CalorimeterHit hit : allHits) {
-						BasicCluster newClus = new BasicCluster();
-						newClus.addHit(hit);
-						allHitsInEvent.add(newClus);
-		     		}
-		     		List<Cluster> allHitsInCone = reassignClustersToTrack(interceptPoint, tangent, allHitsInEvent, anglecut, newMapShowerComponentToTrack, allSharedClusters, clusterEnergy, trackMomentum );
-		     		Cluster clusterOfAllHitsInCone = makeCombinedCluster(allHitsInCone);
-                	double energyOfAllHitsInCone = energy(clusterOfAllHitsInCone);
-		     		System.out.println(clusterOfAllHitsInCone.getCalorimeterHits().size()+" hits from "+allHitsInCone.size()+" clusters in cone give total of "+energyOfAllHitsInCone+" GeV");
-        		}
-        }  
 	}
-	
-	// Loop over jets...
-	System.out.println("Loop over jets");
-	System.out.println("number of jet= " + jets.size());
-	for (Set<Track> jet : jets) {
-		System.out.println("jet size= " + jet.size());
-        Set<Cluster> jetComponents = newMapJetToShowerComponents.get(jet);
-	    double jetEnergy = energy(jetComponents, allSharedClusters);
-	    double jetMomentumMag = jetScalarMomentum(jet);
-	    double EoverP = jetEnergy / jetMomentumMag;
-	    boolean punchThrough = isPunchThrough(jetComponents, allSharedClusters);
-	    if ( EoverP < EoverPcut && !punchThrough ) {
-		Set<Cluster> reassigned = new HashSet<Cluster>();
-		for (Track tr : jet) {
-		    LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
-		    findCluster.process(event); // picks up geometry
-		    Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
-		    Hep3Vector tangent = findCluster.getTangent();
-		    // [flip?]
-            double intX = interceptPoint.x();
-            double intY = interceptPoint.y();
-            double intZ = interceptPoint.z();
-            double tanX = tangent.x();
-            double tanY = tangent.y();
-            double tanZ = tangent.z();
-            double momZ = tr.getMomentum()[2];
-            if (momZ * tanZ < 0.0) {
-                tanX = -tanX;
-                tanY = -tanY;
-                tanZ = -tanZ;
-                tangent = new BasicHep3Vector(tanX, tanY, tanZ);
-                System.out.println("WARNING: Flipped a track tangent direction");
-            }
-                                                                                                                    
-		    List<Cluster> reassignedLinkable = reassignClustersToTrack(interceptPoint, tangent, linkableClusters , anglecut, newMapShowerComponentToTrack, allSharedClusters, jetEnergy, jetMomentumMag);
-		    reassigned.addAll(reassignedLinkable);
-  		}
-		Set<Cluster> newJetComponents = new HashSet<Cluster>();
-		newJetComponents.addAll(jetComponents);
-		newJetComponents.addAll(reassigned);
-		double newEnergy = energy(newJetComponents, allSharedClusters);
-		System.out.println("Jet E was  " + jetEnergy+ ", P was " + jetMomentumMag+ " E is now "+newEnergy);
-	    }
-
-	}	
-
-
-	// Several kinds of cluster:
-	// * old mips & new mips
-	// * clumps
-	// * blocks (large DTrees with no structure)
-	// * photons
-	// * a few others
-	// * plus small clusters & leftover hits
-	// All except the last fall into "linkableClusters"
-	// (and also small clusters etc in "smallClustersToShare" and "leftoverHitClustersToShare")
-
-//	for (Cluster clus : linkableClusters) {
-//	    Track trclus = newMapShowerComponentToTrack.get(clus);
-//	    if (trclus == null) {
-//               System.out.println("Cluster doesn't point to anything");
- //               System.out.println("Hit size= " + clus.getCalorimeterHits().size());
-//              System.out.println("E= " + clus.getEnergy());
-//                double[] pos = clus.getPosition();
-//                double thetaofclus = clus.getITheta();
-//                double phiofclus = clus.getIPhi();
-//                     
-//                for (Track tr : tracksSortedByMomentum) {
- //               	double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
-  //              	double Px = tr.getPX();
-   //             	double Py = tr.getPY();
-    //            	double Pz = tr.getPZ();
-     //           	double thetaoftrk = Math.acos(Pz/trackMomentum);
-      //          	double phioftrk = Math.atan(Py/Px);
-       //                 double deltaR = Math.sqrt((thetaofclus-thetaoftrk)*(thetaofclus-thetaoftrk)+(phiofclus-phioftrk)*(phiofclus-phioftrk));
-         //               if(deltaR < 0.1) System.out.println("This cluster is a downstrem of the track with " + trackMomentum);
-          //      }
-          //      System.out.println("This cluster is located at "+ thetaofclus + ", " + phiofclus);
-	   // } else {
-           // }
-//	}
-
 
-	*/
-
-        if (m_debug) {
-	    printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
-	}
+	//        if (m_debug) {
+	    printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
+	    //	}
 
 	// Outputs
 	makeParticlesAndWriteOut(trackList, tracksSortedByMomentum, unmatchedTracksThatDontReachCalorimeter, mapOrigTrackToTweakedTrack,
@@ -821,56 +566,117 @@
 	m_event = null;
     }
 
-    protected List<Cluster> reassignClustersToTrack (Track tr, Collection<Cluster> initialShower, Collection<Cluster> unassignedClusters, List<SharedClusterGroup> allSharedClusters, double tolerance, double limit) {
+    boolean checkIfReassignmentNeeded(Set<Track> jet, Set<Cluster> showerComponents, List<SharedClusterGroup> allSharedClusters)
+    {
+	double jetMomentum = jetScalarMomentum(jet);
+	boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
+	if (punchThrough) {
+	    // Punches through -- no E/p info -- ignore
+	    System.out.println("DEBUG: Ignoring jet with total p="+jetMomentum+" since it punches through");
+	    return false;
+	}
+	double jetEnergy = energy(showerComponents, allSharedClusters);
+	boolean passesEoverP = testEoverP_twoSided(jetEnergy, jetMomentum, m_jetTolerance);
+	if (passesEoverP) {
+	    System.out.println("DEBUG: Ignoring jet with total p="+jetMomentum+" since it passes E/p (E="+jetEnergy+")");
+	    // Passes E/p check -- ignore
+	    return false;
+	} else if (jetEnergy > jetMomentum) {
+	    System.out.println("DEBUG: Ignoring jet with p="+jetMomentum+" since it E>>p (E="+jetEnergy+")");
+	    // E>>p -- ignore
+	    return false;
+	}
+
+	// OK. This jet needs more energy
+	return true;
+    }
+
+    protected void checkTrackForReassignments(Track tr, 
+					      Map<Track, Set<Cluster>> newMapTrackToShowerComponents,
+					      Map<Cluster, Track> newMapShowerComponentToTrack,
+					      List<SharedClusterGroup> allSharedClusters,
+					      Set<Cluster> unassignedClusters,
+					      double toleranceOfTrack,
+					      ReassignClustersAlgorithm algorithm)
+    {
+	Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
 	Set<Track> tmpJet = new HashSet<Track>();
 	tmpJet.add(tr);
-	return reassignClustersToJet(tmpJet, initialShower, unassignedClusters, allSharedClusters, tolerance, limit);
+	if ( checkIfReassignmentNeeded(tmpJet, showerComponents, allSharedClusters) ) {
+	    List<Cluster> reassignedClusters = reassignClustersToTrack(tr, showerComponents, unassignedClusters, allSharedClusters, toleranceOfTrack, algorithm);
+	    if (reassignedClusters != null && reassignedClusters.size()>0) {
+		for (Cluster clus : reassignedClusters) {
+		    showerComponents.add(clus);
+		    unassignedClusters.remove(clus);
+		    newMapShowerComponentToTrack.put(clus, tr);
+		}
+	    }
+	}
     }
 
-    protected List<Cluster> reassignClustersToJet (Set<Track> jet, Collection<Cluster> initialShower, Collection<Cluster> unassignedClusters, List<SharedClusterGroup> allSharedClusters, double tolerance, double limit) {
-	LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
-	findCluster.process(m_event); // picks up geometry
-	Map<Track, Hep3Vector> interceptPointOfTrack = new HashMap<Track, Hep3Vector>();
-	Map<Track, Hep3Vector> tangentOfTrack= new HashMap<Track, Hep3Vector>();
-	for (Track tr : jet) {
-	    Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
-	    Hep3Vector tangent = findCluster.getTangent();
-	    if(interceptPoint==null) {
-		System.out.println("This track has no interceptpoint");
-		return null;
+    protected void checkJetForReassignments(Set<Track> jet, 
+					    Map<Set<Track>, Set<Cluster>> newMapJetToShowerComponents,
+					    Map<Cluster, Set<Track>> newMapShowerComponentToJet,
+					    List<SharedClusterGroup> allSharedClusters,
+					    Set<Cluster> unassignedClusters,
+					    ReassignClustersAlgorithm algorithm)
+    {
+	Set<Cluster> showerComponents = newMapJetToShowerComponents.get(jet);
+	if ( checkIfReassignmentNeeded(jet, showerComponents, allSharedClusters) ) {
+	    List<Cluster> reassignedClusters = reassignClustersToJet(jet, showerComponents, unassignedClusters, allSharedClusters, m_jetTolerance, algorithm);
+	    if (reassignedClusters != null && reassignedClusters.size()>0) {
+		for (Cluster clus : reassignedClusters) {
+		    showerComponents.add(clus);
+		    unassignedClusters.remove(clus);
+		    newMapShowerComponentToJet.put(clus, jet);
+		}
 	    }
-	    interceptPointOfTrack.put(tr, interceptPoint);
-	    tangentOfTrack.put(tr, tangent);
 	}
+    }
 
 
+    protected List<Cluster> reassignClustersToTrack (Track tr, Collection<Cluster> initialShower, Collection<Cluster> unassignedClusters, List<SharedClusterGroup> allSharedClusters, double tolerance, ReassignClustersAlgorithm reassignAlgorithm) 
+    {
+	Set<Track> tmpJet = new HashSet<Track>();
+	tmpJet.add(tr);
+	return reassignClustersToJet(tmpJet, initialShower, unassignedClusters, allSharedClusters, tolerance, reassignAlgorithm);
+    }
+
+    protected List<Cluster> reassignClustersToJet (Set<Track> jet, Collection<Cluster> initialShower, Collection<Cluster> unassignedClusters, List<SharedClusterGroup> allSharedClusters, double tolerance, ReassignClustersAlgorithm reassignAlgorithm) 
+    {
+	// Truth info debug
+	List<MCParticle> mcList = m_event.get(MCParticle.class, m_mcList);
+	System.out.println("DEBUG: Looking for clusters for the following tracks:");
+	for (Track tr : jet) {
+	    String printme = new String("  * Track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" with contributions from truth particles: ");
+	    List<MCParticle> truthList = getMCParticle(tr);
+	    for (MCParticle part : truthList) {
+		printme += "[" + part.getPDGID() + " with p="+ part.getMomentum().magnitude()+"]";
+	    }
+	    System.out.println(printme);
+	}
+
 	SortedMap<Double, Cluster> clustersSortedByAngle = new TreeMap();
 	for (Cluster clus : unassignedClusters) {
-	    double thetaofclus = clus.getITheta();
-	    double phiofclus = clus.getIPhi();
-	    Hep3Vector clusterPosition = new BasicHep3Vector(clus.getPosition());
-
-	    // Now, loop over each track in the jet and find the minimum angle:
+	    // Now, loop over each track in the jet and find the best match
 	    Track minTrack = null;
 	    double minAngle = 0;
 	    for (Track tr : jet) {
-		Hep3Vector interceptPoint = interceptPointOfTrack.get(tr);
-		Hep3Vector tangent = tangentOfTrack.get(tr);
-		Hep3Vector tangentUnit = VecOp.unit(tangent);
-		Hep3Vector displacement = VecOp.sub(clusterPosition, interceptPoint);
-		Hep3Vector displacementUnit = VecOp.unit(displacement);
-		double angle=Math.acos(VecOp.dot(tangentUnit, displacementUnit));
-		if (minTrack==null || angle<minAngle) {
-		    minTrack = tr;
-		    minAngle = angle;
+		Double angle = reassignAlgorithm.computeFigureOfMerit(tr, clus);
+		if (angle != null) {
+		    if (minTrack==null || angle<minAngle) {
+			minTrack = tr;
+			minAngle = angle;
+		    }
 		}
 	    }
-	    if (minTrack==null) { throw new AssertionError("Failed to find a valid track angle"); }
-	    if (minAngle <= limit) {
+	    if (minTrack==null) { 
+		// Didn't find a valid track match for this cluster -- ignoring
+	    } else {
 		clustersSortedByAngle.put(minAngle, clus);
 	    }
 	}
-	
+
 	List<Cluster> initialClustersPlusAdditions = new Vector<Cluster>(initialShower);
 	List<Cluster> output = new Vector<Cluster>();               
 	System.out.println("Starting cluster loop");
@@ -880,25 +686,26 @@
 	Iterator iter = clustersSortedByAngle.entrySet().iterator();
 	boolean previousIterationOK = false;
 	while(iter.hasNext()) {
-	    double energyBefore = energy(initialClustersPlusAdditions, allSharedClusters);
-	    boolean passesEoverPbefore = testEoverP_twoSided(energyBefore, trackMomentum, tolerance);
+	    // Basics
 	    Map.Entry entry = (Map.Entry) iter.next();
 	    Double key = (Double)entry.getKey();
 	    Cluster value = (Cluster)entry.getValue();
-	    initialClustersPlusAdditions.add(value);
-	    double energyAfter = energy(initialClustersPlusAdditions, allSharedClusters);
-	    if (key > limit) {
-		System.out.println("DEBUG: Stopping because angle="+key+" > limit="+limit+"... E would have been "+energyBefore+"  ->  "+energyAfter);
-		// Too wide an angle and we still haven't satisfied E/p.
-		// Abort (is this the right thing to do?)
-		if (previousIterationOK) {
-		    // We were OK before -- just don't add this cluster
-		    break;
-		} else {
-		    // Nope -- complete failure
-		    return null;
+	    if (m_debug) {
+		// Truth information check
+		Map<MCParticle, List<SimCalorimeterHit>> truthMap = truthInListFromHitList(value.getCalorimeterHits(), mcList);
+		String printme = new String("DEBUG: Cluster with "+value.getCalorimeterHits().size()+" hits at cone angle of "+key);
+		printme += " has contributions from:";
+		for (MCParticle part : truthMap.keySet()) {
+		    List<SimCalorimeterHit> hits = truthMap.get(part);
+		    printme += " [" + hits.size() + " hits from " + part.getPDGID()+" with p="+part.getMomentum().magnitude()+"]";
 		}
+		System.out.println(printme);
 	    }
+	    // Algorithm
+	    double energyBefore = energy(initialClustersPlusAdditions, allSharedClusters);
+	    boolean passesEoverPbefore = testEoverP_twoSided(energyBefore, trackMomentum, tolerance);
+	    initialClustersPlusAdditions.add(value);
+	    double energyAfter = energy(initialClustersPlusAdditions, allSharedClusters);
 	    boolean passesEoverPafter = testEoverP_twoSided(energyAfter, trackMomentum, tolerance);
 	    boolean passesEoverPafterTight = testEoverP_twoSided(energyAfter, trackMomentum, 1.0);
 	    boolean punchThrough = isPunchThrough(initialClustersPlusAdditions, allSharedClusters);  
@@ -1181,43 +988,7 @@
 	return Double.NaN;
     }
 
-    protected Hep3Vector jetThreeMomentum(Set<Track> jet) {
-	Hep3Vector totalJetMomentum = new BasicHep3Vector(0,0,0);
-	for (Track tr : jet) {
-	    Hep3Vector trackMomentum = momentum(tr);
-	    totalJetMomentum = VecOp.add(totalJetMomentum, trackMomentum);
-	}
-	return totalJetMomentum;
-    }
-    protected double jetScalarMomentum(Set<Track> jet) {
-	double totalJetEnergy = 0.0;
-	for (Track tr : jet) {
-	    Hep3Vector trackMomentum = momentum(tr);
-	    totalJetEnergy += trackMomentum.magnitude();
-	}
-	return totalJetEnergy;
-    }
 
-    protected Hep3Vector momentum(Track tr) {
-	Hep3Vector totalMomentum = null;
-	if (tr instanceof BaseTrackMC) {
-	    totalMomentum = ((BaseTrackMC)(tr)).getMCParticle().getMomentum();
-	} else if (tr instanceof MultipleTrackTrack) {
-	    totalMomentum = new BasicHep3Vector(0,0,0);
-	    for (Track subtr : tr.getTracks()) {
-		Hep3Vector subTrackMomentum = null;
-		if (subtr instanceof BaseTrackMC) {
-		    subTrackMomentum = ((BaseTrackMC)(subtr)).getMCParticle().getMomentum();
-		} else {
-		    subTrackMomentum = new BasicHep3Vector(tr.getMomentum());
-		}
-		totalMomentum = VecOp.add(totalMomentum, subTrackMomentum);
-	    }
-	} else {
-	    totalMomentum = new BasicHep3Vector(tr.getMomentum());
-	}
-	return totalMomentum;
-    }
 
     protected int countHitsInSideEdgesOfHcal(Cluster clus, int nLayersToCheck) {
 	// Pick up detector geometry
@@ -3655,5 +3426,7 @@
 	int flag = 1<<org.lcsim.util.lcio.LCIOConstants.CLBIT_HITS;
 	m_event.put("DebugMipsRejected", debugList, Cluster.class, flag);
     }
-	
+
+
 }
+
CVSspam 0.2.8