Commit in lcsim/src/org/lcsim/contrib/uiowa on MAIN
ReclusterDriver.java       +22  -7    1.21 -> 1.22
ReclusterDTreeDriver.java  +421 -5    1.21 -> 1.22
Total                      +443 -12
2 modified files
MJC/TJK: When E<<p, try to pick up extra clusters downstream. For now, uses a simple cone algorithm
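The geometric test behind the cone pickup (implemented in reassignClustersToJet below): extrapolate each track to the calorimeter face, then accept an unassigned cluster if the angle between the track tangent at the intercept point and the displacement from the intercept to the cluster position is within a cone half-angle. A minimal, self-contained sketch of that test follows -- all names and numbers here are illustrative, not part of the commit; the committed code does the equivalent with hep.physics.vec (VecOp.unit, VecOp.sub, Math.acos of the dot product):

    public class ConeTestSketch {
        static double[] unit(double[] v) {
            double norm = Math.sqrt(v[0]*v[0] + v[1]*v[1] + v[2]*v[2]);
            return new double[] { v[0]/norm, v[1]/norm, v[2]/norm };
        }
        static double angleBetween(double[] a, double[] b) {
            double[] ua = unit(a), ub = unit(b);
            double dot = ua[0]*ub[0] + ua[1]*ub[1] + ua[2]*ub[2];
            // Clamp to [-1, 1] so rounding error cannot push acos out of domain
            return Math.acos(Math.max(-1.0, Math.min(1.0, dot)));
        }
        // True if the cluster lies within coneHalfAngle (radians) of the track direction
        static boolean inCone(double[] intercept, double[] tangent,
                              double[] clusterPos, double coneHalfAngle) {
            double[] d = { clusterPos[0] - intercept[0],
                           clusterPos[1] - intercept[1],
                           clusterPos[2] - intercept[2] };
            return angleBetween(tangent, d) <= coneHalfAngle;
        }
        public static void main(String[] args) {
            double[] intercept = { 0.0, 1500.0, 0.0 };   // track entry into calorimeter
            double[] tangent   = { 0.0, 1.0, 0.1 };      // track direction at entry
            double[] cluster   = { 50.0, 1900.0, 60.0 }; // candidate downstream cluster
            System.out.println("in cone: " + inCone(intercept, tangent, cluster, 1.0));
        }
    }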

lcsim/src/org/lcsim/contrib/uiowa
ReclusterDriver.java 1.21 -> 1.22
diff -u -r1.21 -r1.22
--- ReclusterDriver.java	31 May 2008 01:12:37 -0000	1.21
+++ ReclusterDriver.java	13 Jun 2008 22:46:38 -0000	1.22
@@ -37,7 +37,7 @@
   *
   * This version is PRELIMINARY.
   *
-  * @version $Id: ReclusterDriver.java,v 1.21 2008/05/31 01:12:37 mcharles Exp $
+  * @version $Id: ReclusterDriver.java,v 1.22 2008/06/13 22:46:38 mcharles Exp $
   * @author Mat Charles
   */
 
@@ -658,7 +658,7 @@
 				double energyOfNewShower = energy(newShowerOfTrack, allSharedClusters);
 				double tolerance = newMapTrackToTolerance.get(trackOfMatchedClusterOfBestLink).doubleValue();
 				if (m_debug) { System.out.println("DEBUG: Potential link from cluster ["+clus.getCalorimeterHits().size()+" hits] to a cluster ["+matchedClusterOfBestLink.getCalorimeterHits().size()+" hits] would shift energy "+energyOfExistingShower+" --> "+energyOfNewShower+" (p="+trackMomentum); }
-				if (testEoverP(energyOfNewShower, trackOfMatchedClusterOfBestLink, tolerance)) {
+				if (testEoverP_oneSided(energyOfNewShower, trackOfMatchedClusterOfBestLink, tolerance)) {
 				    // OK, add it
 				    newMapTrackToShowerComponents.put(trackOfMatchedClusterOfBestLink, newShowerOfTrack);
 				    newMapShowerComponentToTrack.put(clus, trackOfMatchedClusterOfBestLink);
@@ -1341,20 +1341,35 @@
 	}
     }
 
-    protected boolean testEoverP(double clusterEnergy, Track tr, double tolerance) {
+    protected boolean testEoverP_oneSided(double clusterEnergy, Track tr, double tolerance) {
 	double[] trackThreeMomentum = tr.getMomentum();
 	double trackMomentumSq = trackThreeMomentum[0]*trackThreeMomentum[0] + trackThreeMomentum[1]*trackThreeMomentum[1] + trackThreeMomentum[2]*trackThreeMomentum[2];
 	double trackMomentum = Math.sqrt(trackMomentumSq);
-	return testEoverP(clusterEnergy, trackMomentum, tolerance);
+	return testEoverP_oneSided(clusterEnergy, trackMomentum, tolerance);
     }
-    protected boolean testEoverP(double clusterEnergy, double trackMomentum, double tolerance) {
+    protected boolean testEoverP_twoSided(double clusterEnergy, Track tr, double tolerance) {
+	double[] trackThreeMomentum = tr.getMomentum();
+	double trackMomentumSq = trackThreeMomentum[0]*trackThreeMomentum[0] + trackThreeMomentum[1]*trackThreeMomentum[1] + trackThreeMomentum[2]*trackThreeMomentum[2];
+	double trackMomentum = Math.sqrt(trackMomentumSq);
+	return testEoverP_twoSided(clusterEnergy, trackMomentum, tolerance);
+    }
+    protected boolean testEoverP_oneSided(double clusterEnergy, double trackMomentum, double tolerance) {
+	double trackMassSq = 0.140 * 0.140; // charged-pion mass (GeV) assumed
+	double trackMomentumSq = trackMomentum * trackMomentum;
+	double trackEnergySq = trackMomentumSq + trackMassSq;
+	double trackEnergy = Math.sqrt(trackEnergySq);
+	double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
+	double normalisedResidual = (clusterEnergy - trackEnergy)/trackEnergySigma;
+	return (normalisedResidual < tolerance); // one-sided: only an excess (E >> p) fails; a deficit passes
+    }
+    protected boolean testEoverP_twoSided(double clusterEnergy, double trackMomentum, double tolerance) {
 	double trackMassSq = 0.140 * 0.140;
 	double trackMomentumSq = trackMomentum * trackMomentum;
 	double trackEnergySq = trackMomentumSq + trackMassSq;
 	double trackEnergy = Math.sqrt(trackEnergySq);
 	double trackEnergySigma = 0.7 * Math.sqrt(trackEnergy);
 	double normalisedResidual = (clusterEnergy - trackEnergy)/trackEnergySigma;
-	return (normalisedResidual < tolerance);
+	return (Math.abs(normalisedResidual) < tolerance); // two-sided: excess or deficit beyond tolerance fails
     }
     protected double energy(Cluster clus) {
 	return energy(clus, m_chargedCalib);
@@ -2282,7 +2297,7 @@
 			    	double energyOfExistingShowerPlusAdditionalClusters = energy(existingShowerPlusAdditionalClusters, allSharedClusters);
 			    	long timeAfterEnergyCalculation = Calendar.getInstance().getTimeInMillis(); // DEBUG
 			    	if (m_debugTiming) { System.out.println("DEBUG: Energy calculation took "+(timeAfterEnergyCalculation-timeBeforeEnergyCalculation)+" ms (and prep took"+(timeBeforeEnergyCalculation-timeAfterProbeFullPropagation)+" ms)"); }
-			    	boolean testValidEoverP = testEoverP(energyOfExistingShowerPlusAdditionalClusters, tr, tolerance);
+			    	boolean testValidEoverP = testEoverP_oneSided(energyOfExistingShowerPlusAdditionalClusters, tr, tolerance);
 			    	long timeAfterTestEoverP = Calendar.getInstance().getTimeInMillis(); // DEBUG
 			    	if (m_debugTiming) { System.out.println("DEBUG: E/p test took "+(timeAfterTestEoverP-timeAfterEnergyCalculation)+" ms"); }
 			    	if (testValidEoverP) {

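The change above splits the old testEoverP into two variants. Both compute the normalised residual (E_cluster - E_track) / sigma with E_track = sqrt(p^2 + m^2) for an assumed pion mass and sigma = 0.7 * sqrt(E_track); the one-sided test fails only on an excess (used when growing a shower, where a deficit is still acceptable), while the two-sided test also fails on a deficit (used to decide whether a track or jet still needs energy). A small worked example, with an illustrative tolerance of 2.0:

    public class EoverPExample {
        static double residual(double clusterEnergy, double trackMomentum) {
            double trackEnergy = Math.sqrt(trackMomentum*trackMomentum + 0.140*0.140);
            double sigma = 0.7 * Math.sqrt(trackEnergy); // resolution model from the driver
            return (clusterEnergy - trackEnergy) / sigma;
        }
        public static void main(String[] args) {
            // A 10 GeV/c track matched to only 5 GeV of shower energy:
            double r = residual(5.0, 10.0);                               // ~ -2.26
            System.out.println("one-sided pass: " + (r < 2.0));           // true: no excess
            System.out.println("two-sided pass: " + (Math.abs(r) < 2.0)); // false: E << p
        }
    }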
lcsim/src/org/lcsim/contrib/uiowa
ReclusterDTreeDriver.java 1.21 -> 1.22
diff -u -r1.21 -r1.22
--- ReclusterDTreeDriver.java	9 Jun 2008 16:32:25 -0000	1.21
+++ ReclusterDTreeDriver.java	13 Jun 2008 22:46:38 -0000	1.22
@@ -34,7 +34,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.21 2008/06/09 16:32:25 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.22 2008/06/13 22:46:38 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -514,6 +514,292 @@
 
 	applyOverrides(linkableClusters, photonLikePhotons, electronClusters, tracksMatchedToClusters, newMapShowerComponentToTrack, newMapTrackToShowerComponents, newMapJetToShowerComponents, newMapShowerComponentToJet, mapTrackToJet, newMapTrackToVetoedAdditions, newMapTrackToTolerance, allSharedClusters);
 
+	// Book-keeping for cone-adding
+	Set<Cluster> unassignedClusters = new HashSet<Cluster>();
+	for (Cluster clus : linkableClusters) {
+	    Track matchedTrack = newMapShowerComponentToTrack.get(clus);
+	    Set<Track> matchedJet = newMapShowerComponentToJet.get(clus);
+	    if (matchedTrack == null && matchedJet == null) {
+		unassignedClusters.add(clus);
+	    }
+	}
+	boolean m_fixSingleTracksWithCone = true;
+	boolean m_fixJetsWithCone = true;
+	if (m_fixSingleTracksWithCone) {
+	    // First, try to fix the simplest case: single tracks with E/p < 1
+	    for (Track tr : tracksSortedByMomentum) {
+		double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+		if (mapTrackToJet.get(tr) != null) {
+		    // From a jet -- ignore
+		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since it comes from a jet");
+		    continue;
+		}
+		Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+		boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
+		if (punchThrough) {
+		    // Punches through -- no E/p info -- ignore
+		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since it punches through");
+		    continue;
+		}
+		double showerEnergy = energy(showerComponents, allSharedClusters);
+		double toleranceOfTrack = newMapTrackToTolerance.get(tr);
+		boolean passesEoverP = testEoverP_twoSided(showerEnergy, trackMomentum, toleranceOfTrack);
+		if (passesEoverP) {
+		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since it passes E/p (E="+showerEnergy+")");
+		    // Passes E/p check -- ignore
+		    continue;
+		} else if (showerEnergy > trackMomentum) {
+		    System.out.println("DEBUG: Ignoring track with p="+trackMomentum+" since E>>p (E="+showerEnergy+")");
+		    // E>>p -- ignore
+		    continue;
+		}
+		// OK. This single track needs more energy.
+		System.out.println("DEBUG: Handling track with p="+trackMomentum+" since it fails E/p (E="+showerEnergy+")...");
+		double limit = 1.0; // cone half-angle limit (radians) -- arbitrary for now
+		List<Cluster> reassignedClusters = reassignClustersToTrack(tr, showerComponents, unassignedClusters, allSharedClusters, toleranceOfTrack, limit);
+		if (reassignedClusters != null && reassignedClusters.size()>0) {
+		    for (Cluster clus : reassignedClusters) {
+			showerComponents.add(clus);
+			unassignedClusters.remove(clus);
+			newMapShowerComponentToTrack.put(clus, tr);
+		    }
+		}
+		double newShowerEnergy = energy(showerComponents, allSharedClusters);
+		boolean newPassesEoverP = testEoverP_twoSided(newShowerEnergy, trackMomentum, toleranceOfTrack);
+		boolean newPunchThrough = isPunchThrough(showerComponents, allSharedClusters);
+		System.out.println("DEBUG: After handling track with p="+trackMomentum+", new E = "+newShowerEnergy+" => new E/p="+(newShowerEnergy/trackMomentum)+" => pass="+newPassesEoverP+"; punch-through="+newPunchThrough);
+	    }
+	}
+	if (m_fixJetsWithCone) {
+	    for (Set<Track> jet : jets) {
+		Set<Cluster> showerComponents = newMapJetToShowerComponents.get(jet);
+		boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);
+		double jetMomentum = jetScalarMomentum(jet);
+		if (punchThrough) {
+		    // Punches through -- no E/p info -- ignore
+		    System.out.println("DEBUG: Ignoring jet with total p="+jetMomentum+" since it punches through");
+		    continue;
+		}
+		double jetEnergy = energy(showerComponents, allSharedClusters);
+		boolean passesEoverP = testEoverP_twoSided(jetEnergy, jetMomentum, m_jetTolerance);
+		if (passesEoverP) {
+		    System.out.println("DEBUG: Ignoring jet with total p="+jetMomentum+" since it passes E/p (E="+jetEnergy+")");
+		    // Passes E/p check -- ignore
+		    continue;
+		} else if (jetEnergy > jetMomentum) {
+		    System.out.println("DEBUG: Ignoring jet with total p="+jetMomentum+" since E>>p (E="+jetEnergy+")");
+		    // E>>p -- ignore
+		    continue;
+		}
+		// OK. This jet needs more energy
+		System.out.println("DEBUG: Handling jet with total p="+jetMomentum+" since it fails E/p (E="+jetEnergy+")...");
+		double limit = 1.0; // cone half-angle limit (radians) -- arbitrary for now
+		List<Cluster> reassignedClusters = reassignClustersToJet(jet, showerComponents, unassignedClusters, allSharedClusters, m_jetTolerance, limit);
+		if (reassignedClusters != null && reassignedClusters.size()>0) {
+		    for (Cluster clus : reassignedClusters) {
+			showerComponents.add(clus);
+			unassignedClusters.remove(clus);
+			newMapShowerComponentToJet.put(clus, jet);
+		    }
+		}
+		double newJetEnergy = energy(showerComponents, allSharedClusters);
+		boolean newPassesEoverP = testEoverP_twoSided(newJetEnergy, jetMomentum, m_jetTolerance);
+		boolean newPunchThrough = isPunchThrough(showerComponents, allSharedClusters);
+		System.out.println("DEBUG: After handling jet with total p="+jetMomentum+", new E = "+newJetEnergy+" => new E/p="+(newJetEnergy/jetMomentum)+" => pass="+newPassesEoverP+"; punch-through="+newPunchThrough);
+	    }
+	}
+
+	/*
+
+	// Taejeong's code below
+
+      	// HERE
+	// 1) Look for particles with E/p << 1 that don't punch through [see updateScoring() code]
+	// 2) Look for non-assigned clusters & hits
+	// 3) Check position of non-assigned clusters/hits -- are they downstream of track?
+	// 4) Try to assign clusters/hits with the right position and energy to balance E/p
+
+	// Example (Print out the momentum and energy)
+	System.out.println("New event starting...");
+	System.out.println();
+	double anglecut = 0.3;
+	double EoverPcut = 0.2;
+    for (Track tr : tracksSortedByMomentum) {
+    	Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
+        double clusterEnergy = energy(showerComponents, allSharedClusters);
+        double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+        double EoverP = clusterEnergy/trackMomentum;
+        boolean punchThrough = isPunchThrough(showerComponents, allSharedClusters);  
+        //print P and E
+        System.out.print("P = " + trackMomentum);
+        System.out.print("	E = " + clusterEnergy);
+        System.out.println("	E/P = " + EoverP);
+               
+        LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
+        findCluster.process(event); // picks up geometry
+        Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
+        Hep3Vector tangent = findCluster.getTangent();
+		
+        if(interceptPoint==null) {
+			System.out.println("This track has no intercept point");
+			continue;
+	    }
+		
+        boolean debug = false;
+        // convert x,y,z position to theta and phi
+        double intX = interceptPoint.x();
+        double intY = interceptPoint.y();
+        double intZ = interceptPoint.z();  
+        double tanX = tangent.x();
+        double tanY = tangent.y();
+        double tanZ = tangent.z();
+		double momZ = tr.getMomentum()[2];
+		if (momZ * tanZ < 0.0) {
+		    tanX = -tanX;
+		    tanY = -tanY;
+		    tanZ = -tanZ;
+		    tangent = new BasicHep3Vector(tanX, tanY, tanZ);
+		    //System.out.println("WARNING: Flipped a track tangent direction");
+		}
+
+        if(debug){
+            System.out.println("intx= "+intX+" inty= "+intY+" intZ= "+intZ);
+            System.out.println("tanx= "+tanX+" tany= "+tanY+" tanZ= "+tanZ);
+        }
+              
+        if (mapTrackToJet.get(tr) != null) {
+        	// Treat as jet
+        	Set<Track> jet = mapTrackToJet.get(tr);
+        	Set<Cluster> jetComponents = newMapJetToShowerComponents.get(jet);
+            double jetEnergy = energy(jetComponents, allSharedClusters);
+            double jetMomentumMag = jetScalarMomentum(jet);
+            System.out.println("      from jet with E="+jetEnergy+" and P="+jetMomentumMag);
+        } else {
+        		if ( EoverP < EoverPcut && !punchThrough ) {
+                    System.out.println("This track fails E/p and is not punch-through -- need to reassign clusters");
+                    //Linkable clusters which are not assigned!
+                	List<Cluster> reassignedLinkable = reassignClustersToTrack(interceptPoint, tangent, linkableClusters , anglecut, newMapShowerComponentToTrack, allSharedClusters, clusterEnergy, trackMomentum);
+                	for(Cluster clus : reassignedLinkable) {
+                		double reassignedClusterEnergy = energy(clus, allSharedClusters);      
+                		System.out.println("downstream of this track with "+reassignedClusterEnergy+" GeV");
+                	}
+                    Set<Cluster> newShowerComponents = new HashSet<Cluster>();
+					newShowerComponents.addAll(showerComponents);
+					newShowerComponents.addAll(reassignedLinkable);
+					double newEnergy = energy(newShowerComponents, allSharedClusters);
+				    System.out.println("This track: clusterE=  " + clusterEnergy+ ", P= " + trackMomentum+ " now E= "+newEnergy);
+                	// Shared hits?
+                	//  -> smallClustersToShare
+                	//  -> leftoverHitClustersToShare
+                	// How much energy from "shared hits" in this cone?
+                	List<Cluster> bothListsOfSharedClusters = new Vector<Cluster>();
+                	bothListsOfSharedClusters.addAll(smallClustersToShare);
+                	bothListsOfSharedClusters.addAll(leftoverHitClustersToShare);
+                	// Look for which are in cone
+                	List<Cluster> sharedClustersInCone = reassignClustersToTrack(interceptPoint, tangent, bothListsOfSharedClusters , anglecut, newMapShowerComponentToTrack, allSharedClusters, clusterEnergy, trackMomentum);
+                	Cluster clusterOfSharedHitsInCone = makeCombinedCluster(sharedClustersInCone);
+                	double energyOfSharedHitsInCone = energy(clusterOfSharedHitsInCone);
+                	System.out.println(clusterOfSharedHitsInCone.getCalorimeterHits().size()+" shared hits from "+sharedClustersInCone.size()+" clusters in cone give total of "+energyOfSharedHitsInCone+" GeV");
+
+                	// All hits!
+    		    	List<Cluster> allHitsInEvent = new Vector<Cluster>();
+					for (CalorimeterHit hit : allHits) {
+						BasicCluster newClus = new BasicCluster();
+						newClus.addHit(hit);
+						allHitsInEvent.add(newClus);
+		     		}
+		     		List<Cluster> allHitsInCone = reassignClustersToTrack(interceptPoint, tangent, allHitsInEvent, anglecut, newMapShowerComponentToTrack, allSharedClusters, clusterEnergy, trackMomentum );
+		     		Cluster clusterOfAllHitsInCone = makeCombinedCluster(allHitsInCone);
+                	double energyOfAllHitsInCone = energy(clusterOfAllHitsInCone);
+		     		System.out.println(clusterOfAllHitsInCone.getCalorimeterHits().size()+" hits from "+allHitsInCone.size()+" clusters in cone give total of "+energyOfAllHitsInCone+" GeV");
+        		}
+        }  
+	}
+	
+	// Loop over jets...
+	System.out.println("Loop over jets");
+	System.out.println("number of jets = " + jets.size());
+	for (Set<Track> jet : jets) {
+		System.out.println("jet size= " + jet.size());
+        Set<Cluster> jetComponents = newMapJetToShowerComponents.get(jet);
+	    double jetEnergy = energy(jetComponents, allSharedClusters);
+	    double jetMomentumMag = jetScalarMomentum(jet);
+	    double EoverP = jetEnergy / jetMomentumMag;
+	    boolean punchThrough = isPunchThrough(jetComponents, allSharedClusters);
+	    if ( EoverP < EoverPcut && !punchThrough ) {
+		Set<Cluster> reassigned = new HashSet<Cluster>();
+		for (Track tr : jet) {
+		    LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
+		    findCluster.process(event); // picks up geometry
+		    Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
+		    Hep3Vector tangent = findCluster.getTangent();
+		    // [flip?]
+            double intX = interceptPoint.x();
+            double intY = interceptPoint.y();
+            double intZ = interceptPoint.z();
+            double tanX = tangent.x();
+            double tanY = tangent.y();
+            double tanZ = tangent.z();
+            double momZ = tr.getMomentum()[2];
+            if (momZ * tanZ < 0.0) {
+                tanX = -tanX;
+                tanY = -tanY;
+                tanZ = -tanZ;
+                tangent = new BasicHep3Vector(tanX, tanY, tanZ);
+                System.out.println("WARNING: Flipped a track tangent direction");
+            }
+                                                                                                                    
+		    List<Cluster> reassignedLinkable = reassignClustersToTrack(interceptPoint, tangent, linkableClusters , anglecut, newMapShowerComponentToTrack, allSharedClusters, jetEnergy, jetMomentumMag);
+		    reassigned.addAll(reassignedLinkable);
+  		}
+		Set<Cluster> newJetComponents = new HashSet<Cluster>();
+		newJetComponents.addAll(jetComponents);
+		newJetComponents.addAll(reassigned);
+		double newEnergy = energy(newJetComponents, allSharedClusters);
+		System.out.println("Jet E was  " + jetEnergy+ ", P was " + jetMomentumMag+ " E is now "+newEnergy);
+	    }
+
+	}	
+
+
+	// Several kinds of cluster:
+	// * old mips & new mips
+	// * clumps
+	// * blocks (large DTrees with no structure)
+	// * photons
+	// * a few others
+	// * plus small clusters & leftover hits
+	// All except the last fall into "linkableClusters"
+	// (and also small clusters etc in "smallClustersToShare" and "leftoverHitClustersToShare")
+
+//	for (Cluster clus : linkableClusters) {
+//	    Track trclus = newMapShowerComponentToTrack.get(clus);
+//	    if (trclus == null) {
+//		System.out.println("Cluster doesn't point to anything");
+//		System.out.println("Hit size= " + clus.getCalorimeterHits().size());
+//		System.out.println("E= " + clus.getEnergy());
+//		double[] pos = clus.getPosition();
+//		double thetaofclus = clus.getITheta();
+//		double phiofclus = clus.getIPhi();
+//
+//		for (Track tr : tracksSortedByMomentum) {
+//		    double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+//		    double Px = tr.getPX();
+//		    double Py = tr.getPY();
+//		    double Pz = tr.getPZ();
+//		    double thetaoftrk = Math.acos(Pz/trackMomentum);
+//		    double phioftrk = Math.atan(Py/Px);
+//		    double deltaR = Math.sqrt((thetaofclus-thetaoftrk)*(thetaofclus-thetaoftrk)+(phiofclus-phioftrk)*(phiofclus-phioftrk));
+//		    if(deltaR < 0.1) System.out.println("This cluster is downstream of the track with " + trackMomentum);
+//		}
+//		System.out.println("This cluster is located at "+ thetaofclus + ", " + phiofclus);
+//	    } else {
+//	    }
+//	}
+
+
+	*/
+
         if (m_debug) {
 	    printStatus("FINAL STATUS:", tracksSortedByMomentum, allSharedClusters, newMapTrackToShowerComponents, newMapShowerComponentToTrack, newMapTrackToThreshold, newMapTrackToTolerance, modifiedPhotonClusters, mips, clumps, treesWithNoStructure, seedLeftoverHitClusters, newMapTrackToVetoedAdditions);
 	}
@@ -535,6 +821,136 @@
 	m_event = null;
     }
 
+    protected List<Cluster> reassignClustersToTrack (Track tr, Collection<Cluster> initialShower, Collection<Cluster> unassignedClusters, List<SharedClusterGroup> allSharedClusters, double tolerance, double limit) {
+	Set<Track> tmpJet = new HashSet<Track>();
+	tmpJet.add(tr);
+	return reassignClustersToJet(tmpJet, initialShower, unassignedClusters, allSharedClusters, tolerance, limit);
+    }
+
+    protected List<Cluster> reassignClustersToJet (Set<Track> jet, Collection<Cluster> initialShower, Collection<Cluster> unassignedClusters, List<SharedClusterGroup> allSharedClusters, double tolerance, double limit) {
+	LocalHelixExtrapolator findCluster = new LocalHelixExtrapolator();
+	findCluster.process(m_event); // picks up geometry
+	Map<Track, Hep3Vector> interceptPointOfTrack = new HashMap<Track, Hep3Vector>();
+	Map<Track, Hep3Vector> tangentOfTrack = new HashMap<Track, Hep3Vector>();
+	for (Track tr : jet) {
+	    Hep3Vector interceptPoint = findCluster.performExtrapolation(tr);
+	    Hep3Vector tangent = findCluster.getTangent();
+	    if(interceptPoint==null) {
+		System.out.println("This track has no intercept point");
+		return null;
+	    }
+	    interceptPointOfTrack.put(tr, interceptPoint);
+	    tangentOfTrack.put(tr, tangent);
+	}
+
+
+	SortedMap<Double, Cluster> clustersSortedByAngle = new TreeMap<Double, Cluster>();
+	for (Cluster clus : unassignedClusters) {
+	    double thetaofclus = clus.getITheta();
+	    double phiofclus = clus.getIPhi();
+	    Hep3Vector clusterPosition = new BasicHep3Vector(clus.getPosition());
+
+	    // Now, loop over each track in the jet and find the minimum angle:
+	    Track minTrack = null;
+	    double minAngle = 0;
+	    for (Track tr : jet) {
+		Hep3Vector interceptPoint = interceptPointOfTrack.get(tr);
+		Hep3Vector tangent = tangentOfTrack.get(tr);
+		Hep3Vector tangentUnit = VecOp.unit(tangent);
+		Hep3Vector displacement = VecOp.sub(clusterPosition, interceptPoint);
+		Hep3Vector displacementUnit = VecOp.unit(displacement);
+		double angle=Math.acos(VecOp.dot(tangentUnit, displacementUnit));
+		if (minTrack==null || angle<minAngle) {
+		    minTrack = tr;
+		    minAngle = angle;
+		}
+	    }
+	    if (minTrack==null) { throw new AssertionError("Failed to find a valid track angle"); }
+	    if (minAngle <= limit) {
+		clustersSortedByAngle.put(minAngle, clus);
+	    }
+	}
+	
+	List<Cluster> initialClustersPlusAdditions = new Vector<Cluster>(initialShower);
+	List<Cluster> output = new Vector<Cluster>();               
+	System.out.println("Starting cluster loop");
+	double startingEnergy = energy(initialShower, allSharedClusters);
+	double trackMomentum = jetScalarMomentum(jet);
+	System.out.println("original E= " + startingEnergy + " P= " + trackMomentum + " E/P= " + (startingEnergy/trackMomentum));
+	Iterator<Map.Entry<Double, Cluster>> iter = clustersSortedByAngle.entrySet().iterator();
+	boolean previousIterationOK = false;
+	while(iter.hasNext()) {
+	    double energyBefore = energy(initialClustersPlusAdditions, allSharedClusters);
+	    boolean passesEoverPbefore = testEoverP_twoSided(energyBefore, trackMomentum, tolerance);
+	    Map.Entry<Double, Cluster> entry = iter.next();
+	    Double key = entry.getKey();
+	    Cluster value = entry.getValue();
+	    initialClustersPlusAdditions.add(value);
+	    double energyAfter = energy(initialClustersPlusAdditions, allSharedClusters);
+	    if (key > limit) {
+		System.out.println("DEBUG: Stopping because angle="+key+" > limit="+limit+"... E would have been "+energyBefore+"  ->  "+energyAfter);
+		// Too wide an angle and we still haven't satisfied E/p.
+		// Abort (is this the right thing to do?)
+		if (previousIterationOK) {
+		    // We were OK before -- just don't add this cluster
+		    break;
+		} else {
+		    // Nope -- complete failure
+		    return null;
+		}
+	    }
+	    boolean passesEoverPafter = testEoverP_twoSided(energyAfter, trackMomentum, tolerance);
+	    boolean passesEoverPafterTight = testEoverP_twoSided(energyAfter, trackMomentum, 1.0);
+	    boolean punchThrough = isPunchThrough(initialClustersPlusAdditions, allSharedClusters);  
+	    if (passesEoverPafter) {
+		// OK -- satisfies E/p now
+		System.out.println("DEBUG: E/p now passes... E is "+energyBefore+"  ->  "+energyAfter);
+		if (!passesEoverPafterTight && energyAfter > trackMomentum && previousIterationOK) {
+		    // ... but we're running over the safe edge and we were OK before => stop and don't add it
+		    previousIterationOK = true;
+		    break;
+		} else {
+		    // Add this cluster and be willing to add more
+		    previousIterationOK = true;
+		    output.add(value);
+		}
+	    } else if (!passesEoverPafter && energyAfter < trackMomentum) {
+		if (punchThrough) {
+		    // Raw energy < target, but punches through so that's OK -- stop
+		    previousIterationOK = true;
+		    System.out.println("DEBUG: Punch-through now passes... E is "+energyBefore+"  ->  "+energyAfter);
+		    output.add(value);
+		} else {
+		    // Need more energy -- add cluster and keep going
+		    output.add(value);
+		}
+	    } else {
+		System.out.println("DEBUG: Stopping because now E>>p...E is "+energyBefore+"  ->  "+energyAfter);
+		// We over-shot -- now E is too large.
+		// Don't add this cluster.
+		if (previousIterationOK) {
+		    // We were OK before -- just don't add this cluster
+		    break;
+		} else {
+		    // Nope -- complete failure
+		    return null;
+		}
+	    }
+	    System.out.println("DEBUG: Step: key= " + key + " energy= " + energyAfter + " E/P= " + (energyAfter/trackMomentum));
+	}
+
+	if (!previousIterationOK) {
+	    if (!iter.hasNext()) {
+		System.out.println("DEBUG: Stopping because ran out of clusters -- fail");
+		return null;
+	    } else {
+		throw new AssertionError("Internal consistency failure"); 
+	    }
+	}
+	return output;
+    }
+
+
     // In this weighting scheme
     //   * Components not in the same DTree cluster get a significant penalty factor
     //   * Components in the same DTree cluster get a small additive bonus (to take the minimum above zero)
@@ -1343,7 +1759,7 @@
 	}
     }
 
-    boolean isPunchThrough(Set<Cluster> showerComponents, List<SharedClusterGroup> allSharedClusters) {
+    boolean isPunchThrough(Collection<Cluster> showerComponents, List<SharedClusterGroup> allSharedClusters) {
 	Cluster clusterToCheckForPunchThrough = null;
 	if (m_checkSharedHitsForPunchThrough) {
 	    Cluster sharedHitCluster = makeClusterOfSharedHits(showerComponents, allSharedClusters);
@@ -1812,7 +2228,7 @@
 				existingShowerPlusAdditionalClusters.addAll(impliedAdditionalClusters);
 				existingShowerPlusAdditionalClusters.addAll(componentsInNextTier);
 				double energyOfExistingShowerPlusAdditionalClusters = energy(existingShowerPlusAdditionalClusters, allSharedClusters);
-				boolean testValidEoverP = testEoverP(energyOfExistingShowerPlusAdditionalClusters, totalJetEnergy, m_jetTolerance);
+				boolean testValidEoverP = testEoverP_oneSided(energyOfExistingShowerPlusAdditionalClusters, totalJetEnergy, m_jetTolerance);
 				if (testValidEoverP) {
 				    componentsInNextTier.addAll(impliedAdditionalClusters);
 				}
@@ -3052,9 +3468,9 @@
 				}
 				boolean testEoverPmatch = false;
 				if (treatAsSingleTrack) {
-				    testEoverPmatch = testEoverP(energyOfNewShower, trackOfMatchedClusterOfBestLink, tolerance);
+				    testEoverPmatch = testEoverP_oneSided(energyOfNewShower, trackOfMatchedClusterOfBestLink, tolerance);
 				} else {
-				    testEoverPmatch = testEoverP(energyOfNewShower, jetScalarMomentum(jet), tolerance);
+				    testEoverPmatch = testEoverP_oneSided(energyOfNewShower, jetScalarMomentum(jet), tolerance);
 				}
 				if (testEoverPmatch) {
 				    // OK, add it
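
One design note on the new reassignClustersToJet: candidate clusters are accreted greedily in order of increasing angle to the nearest track in the jet, stopping once E/p is satisfied (or giving up if the addition overshoots). Because that ordering lives in a TreeMap<Double, Cluster> keyed on the angle, two clusters whose minimum angles compare exactly equal would silently collapse to a single entry. A sketch of an ordering that keeps both -- illustrative only, with a stand-in cluster type rather than org.lcsim's Cluster:

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    public class AngleOrderingSketch {
        static class Candidate {
            final double angle;   // angle to the nearest track, radians
            final Object cluster; // stand-in for org.lcsim's Cluster
            Candidate(double angle, Object cluster) { this.angle = angle; this.cluster = cluster; }
        }
        public static void main(String[] args) {
            List<Candidate> candidates = new ArrayList<Candidate>();
            candidates.add(new Candidate(0.25, "clusterA"));
            candidates.add(new Candidate(0.25, "clusterB")); // equal angle: both survive
            candidates.add(new Candidate(0.10, "clusterC"));
            candidates.sort(Comparator.comparingDouble(c -> c.angle));
            for (Candidate c : candidates) {
                System.out.println(c.angle + " -> " + c.cluster);
            }
        }
    }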
CVSspam 0.2.8