Commit in lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa on MAIN
recon/cluster/clumpfinder/AlternateClumpFinder.java    +1 -1   1.2 -> 1.3
                         /ClumpFinder.java             +1 -1   1.2 -> 1.3
                         /HighHitDensityDecision.java  +1 -1   1.2 -> 1.3
recon/cluster/clumpfinder/kmean/LocalDensityMaximumDecision.java  +1 -1   1.2 -> 1.3
recon/cluster/structural/ChargedNeutralFragmentSeparator.java   +1 -1   1.2 -> 1.3
                        /CheatFragmentIdentifier.java           +1 -1   1.2 -> 1.3
                        /CheatLikelihoodLinkDriver.java         +1 -1   1.2 -> 1.3
                        /DropFragments.java                     +1 -1   1.2 -> 1.3
                        /FragmentHandler.java                   +1 -1   1.2 -> 1.3
                        /FragmentIdentifier.java                +1 -1   1.2 -> 1.3
                        /FragmentIdentifierDecisionMaker.java   +1 -1   1.2 -> 1.3
                        /FragmentMerger.java                    +1 -1   1.2 -> 1.3
                        /GenericStructuralDriver.java           +1 -1   1.2 -> 1.3
                        /HaloAssigner.java                      +1 -1   1.2 -> 1.3
                        /LikelihoodFindingStructuralDriver.java +1 -1   1.2 -> 1.3
                        /LikelihoodLinkDriver.java              +1 -1   1.2 -> 1.3
                        /LikelihoodLinkPlotDriver.java          +1 -1   1.2 -> 1.3
                        /Link.java                              +1 -1   1.2 -> 1.3
                        /SimpleFragmentIdentifier.java          +1 -1   1.2 -> 1.3
                        /SimpleFragmentMerger.java              +1 -1   1.2 -> 1.3
recon/cluster/structural/likelihood/MiscUtilities.java          +1 -1   1.1 -> 1.2
                                   /TrackToTrackDOCA.java       +1 -1   1.2 -> 1.3
                                   /TrackToTrackPOCAInCalorimeter.java       +1 -1   1.2 -> 1.3
                                   /TrackToTrackSmallestDistanceToPOCA.java  +1 -1   1.2 -> 1.3
recon/pfa/debug/Shower2D.java             +36 added    1.1
               /ShowerBranch2D.java       +138 added   1.1
               /DebugParticles.java       +1 -1        1.1 -> 1.2
               /DebugPhotonDriver.java    +1 -1        1.1 -> 1.2
               /DebugRegionalEoverP.java  +40 -1       1.1 -> 1.2
               /DebugShowersDriver.java   +2129 -146   1.1 -> 1.2
               /DebugTrackSeedMatchingDriver.java     +1 -1   1.1 -> 1.2
               /DebugTrackToClusterSpecialCases.java  +2 -2   1.1 -> 1.2
               /DebugUtils.java                       +1 -1   1.1 -> 1.2
recon/pfa/identifier/AmbiguousTrackToClusterMapMaker.java  +118 added   1.1
                    /CheatHelixTrackClusterMatcher.java     +46 added    1.1
                    /CheatHelixTrackMIPClusterMatcher.java  +34 added    1.1
                    /CheatTrackClusterMatcher.java          +396 added   1.1
                    /DualActionTrackClusterMatcher.java     +55 added    1.1
                    /FlexibleHelixExtrapolator.java         +81 added    1.1
                    /HelixExtrapolationResult.java          +77 added    1.1
                    /HelixExtrapolator.java                 +362 added   1.1
                    /LocalHelixExtrapolationTrackClusterMatcher.java     +211 added   1.1
                    /LocalHelixExtrapolationTrackMIPClusterMatcher.java  +83 added    1.1
                    /LocalHelixExtrapolator.java            +462 added   1.1
                    /MIPChargedParticleMaker.java           +346 added   1.1
                    /MultipleTrackTrack.java                +53 added    1.1
                    /SequentialTrackClusterMatcher.java     +44 added    1.1
                    /SimpleChargedParticleMaker.java        +165 added   1.1
                    /SimpleNeutralParticleMaker.java        +121 added   1.1
                    /SimpleTrackClusterMatcher.java         +478 added   1.1
                    /SimpleTrackMIPClusterMatcher.java      +72 added    1.1
                    /SmallPhotonMaker.java                  +144 added   1.1
                    /TrackClusterMatcher.java               +26 added    1.1
                    /TrackHelixExtrapolator.java            +232 added   1.1
                    /TrackHelixPlusHitExtrapolator.java     +348 added   1.1
                    /TrackToClusterMapMaker.java            +53 added    1.1
                    /TrackToElectronMapMaker.java           +165 added   1.1
                    /TrackToGenericClusterMapMaker.java     +112 added   1.1
                    /TrackToMipClusterMapMaker.java         +102 added   1.1
                    /TrackToPreShowerMipMapMaker.java       +159 added   1.1
recon/pfa/structural/PFADetectorLayer.java      +376 added   1.1
                    /PFAWrapper.java            +168 added   1.1
                    /SlicedShowerBuilder.java   +3754 added  1.1
                    /BaselineShowerBuilder.java                 +12 -12  1.2 -> 1.3
                    /ChargedHadronClusterEnergyCalculator.java  +1 -1    1.2 -> 1.3
                    /CheckDisjoint.java                         +2 -2    1.2 -> 1.3
                    /CheckSkeletonsForMultipleTracks.java       +1 -1    1.1 -> 1.2
                    /ConeMIPReassignmentAlgorithm.java          +1 -1    1.2 -> 1.3
                    /ConeReassignmentAlgorithm.java             +1 -1    1.1 -> 1.2
                    /ExampleGenerateLikelihood.java             +1 -1    1.2 -> 1.3
                    /FuzzyCalorimeterHit.java                   +1 -1    1.2 -> 1.3
                    /FuzzyNeutralHadronClusterEnergyCalculator.java   +1 -1   1.2 -> 1.3
                    /FuzzyPhotonClusterEnergyCalculator.java          +1 -1   1.2 -> 1.3
                    /FuzzyQNeutralHadronClusterEnergyCalculator.java  +1 -1   1.2 -> 1.3
                    /FuzzyQPhotonClusterEnergyCalculator.java         +1 -1   1.2 -> 1.3
                    /HelixTangentMIPGeometryHandler.java        +2 -2    1.2 -> 1.3
                    /HitBookKeeper.java                         +1 -1    1.2 -> 1.3
                    /HitFilterDriver.java                       +1 -1    1.2 -> 1.3
                    /LayerBasedMIPGeometryHandler.java          +2 -2    1.2 -> 1.3
                    /MIPGeometryHandler.java                    +1 -1    1.2 -> 1.3
                    /MergeClustersCrossingSubDetectorBoundaries.java  +1 -1   1.2 -> 1.3
                    /NewShowerBuilder.java                      +1238 -669  1.1 -> 1.2
                    /NonTrivialPFA.java                         +8 -8    1.2 -> 1.3
                    /PFABookKeepingBroker.java                  +1 -1    1.2 -> 1.3
                    /PFAParticleMaker.java                      +2 -2    1.1 -> 1.2
                    /PFAUtil.java                               +4 -2    1.2 -> 1.3
                    /PhotonVetoDecision.java                    +1 -1    1.1 -> 1.2
                    /PhotonVetoDriver.java                      +1 -1    1.1 -> 1.2
                    /PreShowerMIPReassignmentAlgorithm.java     +1 -1    1.1 -> 1.2
                    /ReclusterDTreeDriver.java                  +28 -17  1.2 -> 1.3
                    /ReclusterDriver.java                       +3 -3    1.2 -> 1.3
                    /RunAndWriteOutPFAFullTracking.java         +24 -8   1.2 -> 1.3
                    /SetUpDTreeForReclustering.java             +17 -12  1.3 -> 1.4
                    /SetUpPFA.java                              +34 -15  1.2 -> 1.3
                    /TrackToClusterCosAngle.java                +1 -1    1.1 -> 1.2
                    /TrackToClusterDistance.java                +1 -1    1.1 -> 1.2
                    /TrackToClusterForce.java                   +1 -1    1.1 -> 1.2
                    /TrackToClusterLikelihoodQuantity.java      +1 -1    1.1 -> 1.2
                    /TrackToClusterSpecialCasesMapMaker.java    +3 -6    1.3 -> 1.4
recon/pfa/structural/sharing/ClusterSharingAlgorithmWrapper.java  -2   1.2 -> 1.3
recon/pfa/structural/shower/ShowerBranch.java        +92 added    1.1
                           /ShowerWithBranches.java  +112 added   1.1
                           /Shower.java              +1 -1        1.2 -> 1.3
                           /ShowerContainer.java     +4 -2        1.2 -> 1.3
scripts/compile.sh                       -11    1.1 removed
       /computeCorrelations.sh           -3     1.1 removed
       /computeEfficiencyVsRejection.sh  -3     1.1 removed
       /exportCLASSPATH.sh               -9     1.1 removed
       /importCode.csh                   -186   1.1 removed
       /java-init-cache.sh               -26    1.1 removed
       /java-init.sh                     -21    1.1 removed
       /java-run.sh                      -17    1.1 removed
       /macro-run.sh                     -9     1.1 removed
       /mergeAIDAFiles.sh                -3     1.1 removed
       /mergeLikelihoodFiles.sh          -5     1.1 removed
       /normalize.sh                     -3     1.1 removed
       /plotLikelihoods.sh               -3     1.1 removed
       /run-pfa.sh                       -10    1.1 removed
       /submit.sh                        -106   1.1 removed
+12826 -1379
34 added + 15 removed + 70 modified, total 119 files
pfa update

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/clumpfinder
AlternateClumpFinder.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- AlternateClumpFinder.java	23 Oct 2011 09:50:28 -0000	1.2
+++ AlternateClumpFinder.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -14,7 +14,7 @@
   * up clusters which have multiple local maxima in energy since that may indicate
   * that we actually have two separate showers/clusters.
   *
-  * @version $Id: AlternateClumpFinder.java,v 1.2 2011/10/23 09:50:28 zaidan Exp $
+  * @version $Id: AlternateClumpFinder.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
   */
 
 public class AlternateClumpFinder extends ClumpFinder
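
A sketch of the "multiple local maxima in energy" criterion behind the splitting described above: a hit is a local maximum if none of its neighbours carries more corrected energy. The neighbour lookup is assumed to come from elsewhere; the helper name is ours, not part of the class:

    // Illustrative only: neighbours of the hit are supplied by the caller.
    static boolean isLocalMaximum(CalorimeterHit hit, Collection<CalorimeterHit> neighbours)
    {
        for (CalorimeterHit n : neighbours)
        {
            if (n.getCorrectedEnergy() > hit.getCorrectedEnergy()) return false;
        }
        return true;
    }

A cluster whose hits contain two or more such maxima is a candidate for being two merged showers.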

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/clumpfinder
ClumpFinder.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ClumpFinder.java	23 Oct 2011 09:50:28 -0000	1.2
+++ ClumpFinder.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -29,7 +29,7 @@
  * or more for each hit. This is hard-coded at the moment, but
 * should become user-definable in a later version.
  * 
- * @version $Id: ClumpFinder.java,v 1.2 2011/10/23 09:50:28 zaidan Exp $
+ * @version $Id: ClumpFinder.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class ClumpFinder extends Driver implements Clusterer

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/clumpfinder
HighHitDensityDecision.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- HighHitDensityDecision.java	23 Oct 2011 09:50:28 -0000	1.2
+++ HighHitDensityDecision.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -14,7 +14,7 @@
  * Eventually this should be made more flexible and moved outside
  * the "structural" package.
  *
- * @version $Id: HighHitDensityDecision.java,v 1.2 2011/10/23 09:50:28 zaidan Exp $
+ * @version $Id: HighHitDensityDecision.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class HighHitDensityDecision implements DecisionMakerSingle<CalorimeterHit> 
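
HighHitDensityDecision and LocalDensityMaximumDecision (below) share the same one-argument pattern: the class answers a yes/no question about a single CalorimeterHit. A minimal sketch, assuming the interface exposes one boolean method (the method name valid and the energy cut are illustrative, not the actual lcsim signature):

    public class MinimumEnergyDecision implements DecisionMakerSingle<CalorimeterHit>
    {
        private final double m_threshold; // GeV; illustrative cut value

        public MinimumEnergyDecision(double threshold) { m_threshold = threshold; }

        // Accept the hit only if its corrected energy passes the cut.
        public boolean valid(CalorimeterHit hit)
        {
            return hit.getCorrectedEnergy() > m_threshold;
        }
    }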

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/clumpfinder/kmean
LocalDensityMaximumDecision.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- LocalDensityMaximumDecision.java	23 Oct 2011 09:50:28 -0000	1.2
+++ LocalDensityMaximumDecision.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -12,7 +12,7 @@
  * Eventually this should be made more flexible and moved outside
  * the "structural" package.
  *
- * @version $Id: LocalDensityMaximumDecision.java,v 1.2 2011/10/23 09:50:28 zaidan Exp $
+ * @version $Id: LocalDensityMaximumDecision.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class LocalDensityMaximumDecision implements DecisionMakerSingle<CalorimeterHit>

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
ChargedNeutralFragmentSeparator.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ChargedNeutralFragmentSeparator.java	23 Oct 2011 09:50:28 -0000	1.2
+++ ChargedNeutralFragmentSeparator.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -25,7 +25,7 @@
  *  A class that takes a small cluster or single hit and
  *  tries to decide whether it is charged or neutral.
  *
- * @version $Id: ChargedNeutralFragmentSeparator.java,v 1.2 2011/10/23 09:50:28 zaidan Exp $
+ * @version $Id: ChargedNeutralFragmentSeparator.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class ChargedNeutralFragmentSeparator extends Driver implements DecisionMakerSingle<Cluster> 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
CheatFragmentIdentifier.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- CheatFragmentIdentifier.java	23 Oct 2011 09:50:29 -0000	1.2
+++ CheatFragmentIdentifier.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -8,7 +8,7 @@
 /**
   * A cheating class to determine whether a given cluster is a fragment.
   *
-  * @version $Id: CheatFragmentIdentifier.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+  * @version $Id: CheatFragmentIdentifier.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
   */
 public class CheatFragmentIdentifier implements FragmentIdentifier
 {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
CheatLikelihoodLinkDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- CheatLikelihoodLinkDriver.java	23 Oct 2011 09:50:29 -0000	1.2
+++ CheatLikelihoodLinkDriver.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -20,7 +20,7 @@
  * truth information.
  *
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: CheatLikelihoodLinkDriver.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: CheatLikelihoodLinkDriver.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class CheatLikelihoodLinkDriver extends LikelihoodLinkDriver

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
DropFragments.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- DropFragments.java	23 Oct 2011 09:50:29 -0000	1.2
+++ DropFragments.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -9,7 +9,7 @@
  * An implementation of <code>FragmentMerger</code>.
  * We simply discard every fragment.
  *
- * @version $Id: DropFragments.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: DropFragments.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class DropFragments implements FragmentMerger

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
FragmentHandler.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FragmentHandler.java	23 Oct 2011 09:50:29 -0000	1.2
+++ FragmentHandler.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -19,7 +19,7 @@
  *
  * @see FragmentIdentifier
  * @see FragmentMerger
- * @version $Id: FragmentHandler.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: FragmentHandler.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class FragmentHandler extends Driver

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
FragmentIdentifier.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FragmentIdentifier.java	23 Oct 2011 09:50:29 -0000	1.2
+++ FragmentIdentifier.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -6,7 +6,7 @@
 /**
  * Determine whether a cluster is a fragment or not.
  *
- * @version $Id: FragmentIdentifier.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: FragmentIdentifier.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public interface FragmentIdentifier 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
FragmentIdentifierDecisionMaker.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FragmentIdentifierDecisionMaker.java	23 Oct 2011 09:50:29 -0000	1.2
+++ FragmentIdentifierDecisionMaker.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -10,7 +10,7 @@
  * A wrapper class that allows a FragmentIdentifier to be
  * treated as a DecisionMakerSingle<Cluster>
  *
- * @version $Id: FragmentIdentifierDecisionMaker.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: FragmentIdentifierDecisionMaker.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class FragmentIdentifierDecisionMaker extends Driver implements DecisionMakerSingle<Cluster>

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
FragmentMerger.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FragmentMerger.java	23 Oct 2011 09:50:29 -0000	1.2
+++ FragmentMerger.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -23,7 +23,7 @@
  * <BR> newCluster.addCluster(fragment); // repeated for each fragment to be merged into this cluster
  * </BLOCKQUOTE>
  *
- * @version $Id: FragmentMerger.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: FragmentMerger.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public interface FragmentMerger
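
A minimal sketch of the merge contract quoted in the javadoc above, assuming BasicCluster from org.lcsim.recon.cluster.util provides addCluster as in the javadoc's example (the class and method names here are ours, not part of the interface):

    import java.util.Collection;
    import org.lcsim.event.Cluster;
    import org.lcsim.recon.cluster.util.BasicCluster;

    public class FragmentMergeSketch
    {
        public static Cluster mergeIntoPrimary(Cluster primary, Collection<Cluster> fragments)
        {
            BasicCluster newCluster = new BasicCluster();
            newCluster.addCluster(primary);       // keep the primary's content
            for (Cluster fragment : fragments)
                newCluster.addCluster(fragment);  // repeated for each fragment to be merged
            return newCluster;
        }
    }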

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
GenericStructuralDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- GenericStructuralDriver.java	23 Oct 2011 09:50:29 -0000	1.2
+++ GenericStructuralDriver.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -19,7 +19,7 @@
  * track segments. See the process() description for more detail.
  *
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: GenericStructuralDriver.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: GenericStructuralDriver.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public abstract class GenericStructuralDriver extends Driver 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
HaloAssigner.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- HaloAssigner.java	23 Oct 2011 09:50:29 -0000	1.2
+++ HaloAssigner.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -17,7 +17,7 @@
  * Assign hits which are not part of a Cluster to a nearby Cluster.
  *
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: HaloAssigner.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: HaloAssigner.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class HaloAssigner extends Driver

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
LikelihoodFindingStructuralDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- LikelihoodFindingStructuralDriver.java	23 Oct 2011 09:50:29 -0000	1.2
+++ LikelihoodFindingStructuralDriver.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -16,7 +16,7 @@
  * class must be called.
  *
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: LikelihoodFindingStructuralDriver.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: LikelihoodFindingStructuralDriver.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class LikelihoodFindingStructuralDriver extends GenericStructuralDriver 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
LikelihoodLinkDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- LikelihoodLinkDriver.java	23 Oct 2011 09:50:29 -0000	1.2
+++ LikelihoodLinkDriver.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -22,7 +22,7 @@
  * LikelihoodEvaluator.
  *
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: LikelihoodLinkDriver.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: LikelihoodLinkDriver.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class LikelihoodLinkDriver extends GenericStructuralDriver 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
LikelihoodLinkPlotDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- LikelihoodLinkPlotDriver.java	23 Oct 2011 09:50:29 -0000	1.2
+++ LikelihoodLinkPlotDriver.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -15,7 +15,7 @@
  * this class makes some plots evaluating the performance.
  *
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: LikelihoodLinkPlotDriver.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: LikelihoodLinkPlotDriver.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class LikelihoodLinkPlotDriver extends LikelihoodLinkDriver

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
Link.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- Link.java	23 Oct 2011 09:50:29 -0000	1.2
+++ Link.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -8,7 +8,7 @@
  * org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural
  *
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: Link.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: Link.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 class Link {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
SimpleFragmentIdentifier.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- SimpleFragmentIdentifier.java	23 Oct 2011 09:50:29 -0000	1.2
+++ SimpleFragmentIdentifier.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -30,7 +30,7 @@
  * the cluster being tested, it is considered to have a track and therefore
  * to be a primary. 
  *
- * @version $Id: SimpleFragmentIdentifier.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: SimpleFragmentIdentifier.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class SimpleFragmentIdentifier implements FragmentIdentifier
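
The decision described above reduces to: a cluster with at least one track pointing at it is a primary; otherwise it is treated as a fragment. A sketch under that reading (isFragment, pointsAt, and m_tracks are illustrative; the real FragmentIdentifier signature may differ):

    // Illustrative only: assumes the identifier holds the event's tracks
    // and a geometric pointing test.
    public boolean isFragment(Cluster clus)
    {
        for (Track trk : m_tracks)
        {
            if (pointsAt(trk, clus)) return false; // a track points here => primary
        }
        return true; // no track points at the cluster => fragment
    }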

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural
SimpleFragmentMerger.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- SimpleFragmentMerger.java	23 Oct 2011 09:50:29 -0000	1.2
+++ SimpleFragmentMerger.java	11 Apr 2012 15:49:34 -0000	1.3
@@ -14,7 +14,7 @@
  * can be changed by extending this class and over-riding
  * that method.
  *
- * @version $Id: SimpleFragmentMerger.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: SimpleFragmentMerger.java,v 1.3 2012/04/11 15:49:34 zaidan Exp $
  */
 
 public class SimpleFragmentMerger implements FragmentMerger

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural/likelihood
MiscUtilities.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- MiscUtilities.java	27 May 2011 12:01:10 -0000	1.1
+++ MiscUtilities.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -15,7 +15,7 @@
 import org.lcsim.util.swim.Line;
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 public class MiscUtilities
 {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural/likelihood
TrackToTrackDOCA.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- TrackToTrackDOCA.java	23 Oct 2011 09:50:29 -0000	1.2
+++ TrackToTrackDOCA.java	11 Apr 2012 15:49:35 -0000	1.3
@@ -22,7 +22,7 @@
   * four hits (otherwise the direction is not so meaningful).
   *
   * @author Mat Charles <[log in to unmask]>
-  * @version $Id: TrackToTrackDOCA.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+  * @version $Id: TrackToTrackDOCA.java,v 1.3 2012/04/11 15:49:35 zaidan Exp $
   */
 
 public class TrackToTrackDOCA extends ClusterToClusterLikelihoodQuantity

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural/likelihood
TrackToTrackPOCAInCalorimeter.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- TrackToTrackPOCAInCalorimeter.java	23 Oct 2011 09:50:29 -0000	1.2
+++ TrackToTrackPOCAInCalorimeter.java	11 Apr 2012 15:49:35 -0000	1.3
@@ -28,7 +28,7 @@
   * at the start of every event.
   * 
   * @author Mat Charles <[log in to unmask]>
-  * @version $Id: TrackToTrackPOCAInCalorimeter.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+  * @version $Id: TrackToTrackPOCAInCalorimeter.java,v 1.3 2012/04/11 15:49:35 zaidan Exp $
   */
 
 public class TrackToTrackPOCAInCalorimeter extends ClusterToClusterLikelihoodQuantity

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/cluster/structural/likelihood
TrackToTrackSmallestDistanceToPOCA.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- TrackToTrackSmallestDistanceToPOCA.java	23 Oct 2011 09:50:29 -0000	1.2
+++ TrackToTrackSmallestDistanceToPOCA.java	11 Apr 2012 15:49:35 -0000	1.3
@@ -25,7 +25,7 @@
  * four hits (otherwise the direction is not so meaningful).
  * 
  * @author Mat Charles <[log in to unmask]>
- * @version $Id: TrackToTrackSmallestDistanceToPOCA.java,v 1.2 2011/10/23 09:50:29 zaidan Exp $
+ * @version $Id: TrackToTrackSmallestDistanceToPOCA.java,v 1.3 2012/04/11 15:49:35 zaidan Exp $
  */
 
 public class TrackToTrackSmallestDistanceToPOCA extends ClusterToClusterLikelihoodQuantity
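
The geometry behind these track-to-track quantities: fit a straight line (point p, unit direction d) to the last few hits of each segment, then take the distance of closest approach between the two lines. A self-contained sketch using hep.physics.vec (the class and method names are ours):

    import hep.physics.vec.Hep3Vector;
    import hep.physics.vec.VecOp;

    public class DocaSketch
    {
        // DOCA between lines (p1 + t*d1) and (p2 + s*d2), d1 and d2 unit vectors.
        public static double doca(Hep3Vector p1, Hep3Vector d1, Hep3Vector p2, Hep3Vector d2)
        {
            Hep3Vector n = VecOp.cross(d1, d2); // common normal of the two lines
            Hep3Vector dp = VecOp.sub(p2, p1);
            double nMag = n.magnitude();
            if (nMag < 1e-12)
            {
                // (Nearly) parallel lines: distance from p2 to the first line.
                Hep3Vector proj = VecOp.mult(VecOp.dot(dp, d1), d1);
                return VecOp.sub(dp, proj).magnitude();
            }
            return Math.abs(VecOp.dot(dp, n)) / nMag;
        }
    }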

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
Shower2D.java added at 1.1
diff -N Shower2D.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ Shower2D.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,36 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.debug;
+
+import java.util.*;
+import hep.physics.vec.*;
+
+public class Shower2D
+{
+    protected List<ShowerBranch2D> m_branches = new Vector<ShowerBranch2D>();
+    protected double m_momentum;
+    protected boolean m_isCharged;
+    protected int m_id;
+
+    public Shower2D()
+    {
+    }
+
+    public double getMomentum() { return m_momentum; }
+    public void setMomentum( double x ) { m_momentum = x; }
+
+    public boolean isCharged() { return m_isCharged; }
+    public void setIsCharged( boolean b ) { m_isCharged = b; }
+
+    public List<ShowerBranch2D> getBranches(){ return m_branches; }
+    public void addBranch(ShowerBranch2D branch){
+	m_branches.add(branch);
+	branch.setMother(this);
+    }
+
+    public int getId() { return m_id; }
+    public void setId( int i ) { m_id = i; }
+
+    boolean m_isSpecial = false;
+    boolean isSpecial(){ return m_isSpecial; }
+    void setSpecial(boolean s){ m_isSpecial = s; }
+
+}
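
Usage sketch for the new container (values are illustrative):

    Shower2D shower = new Shower2D();
    shower.setId(1);
    shower.setMomentum(10.0);   // GeV
    shower.setIsCharged(true);

    ShowerBranch2D branch = new ShowerBranch2D();
    shower.addBranch(branch);   // also sets the branch's mother to this shower

    // Momentum and charge queries on a branch delegate to the mother:
    double p = branch.getMomentum();      // 10.0
    boolean charged = branch.isCharged(); // true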

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
ShowerBranch2D.java added at 1.1
diff -N ShowerBranch2D.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ShowerBranch2D.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,138 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.debug;
+
+import java.util.*;
+
+import hep.aida.*;
+import hep.physics.vec.*;
+import hep.physics.particle.properties.*;
+
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.SimCalorimeterHit;
+import org.lcsim.geometry.IDDecoder;
+import org.lcsim.geometry.compact.Subdetector;
+
+
+
+public class ShowerBranch2D
+{
+    protected List<Cluster> m_listClusters = new ArrayList<Cluster>();
+    protected Vector<Cluster> m_vectorClusters = new Vector<Cluster>();
+    protected Map<Integer,List<Cluster>> m_mapLayerToListClusters = new HashMap<Integer,List<Cluster>>();
+    protected Cluster m_lastAddedCluster;
+    protected Hep3Vector m_direction;
+    protected Hep3Vector m_position;
+    protected boolean m_isMip;
+    protected boolean m_isSeed;
+    protected boolean m_isPositivePole;
+    protected boolean m_isNegativePole;
+    protected boolean m_isBlock;
+    protected int m_id;
+
+    protected Shower2D m_mother;
+
+    public ShowerBranch2D()
+    {
+    }
+
+    public void addCluster( Cluster clus )
+    {
+	m_listClusters.add( clus );
+	m_lastAddedCluster = clus;
+	m_vectorClusters.add( clus );
+
+	m_position = new BasicHep3Vector( clus.getPosition() );
+
+	Hep3Vector vSum = new BasicHep3Vector( 0. , 0. , 0. );
+	for( int i = 0 ; i < m_vectorClusters.size() ; i++ )
+	    {
+		Hep3Vector v = null;
+		if( i == 0 ) v = VecOp.unit( new BasicHep3Vector( m_vectorClusters.get(i).getPosition() ) );
+		else
+		    {
+			v = VecOp.sub( new BasicHep3Vector( m_vectorClusters.get(i).getPosition() ) , new BasicHep3Vector( m_vectorClusters.get(i-1).getPosition() ) );
+			v = VecOp.unit( v );
+		    }
+		vSum = VecOp.add( vSum , v );
+	    }
+	m_direction = VecOp.mult( m_vectorClusters.size() , vSum );
+    }
+
+    public Cluster getLastAddedCluster()
+    {
+	return m_lastAddedCluster;
+    }
+
+    public List<Cluster> getClusters()
+    {
+	return m_listClusters;
+    }
+
+    public List<Cluster> getListClusterOnLayer( Integer ii )
+    {
+	return m_mapLayerToListClusters.get( ii );
+    }
+
+    public void setLayerAndCluster( Integer ii , Cluster clus2D )
+    {
+	Set<Integer> setLayer = m_mapLayerToListClusters.keySet();
+	List<Integer> listLayer = new ArrayList<Integer>( setLayer );
+	if( listLayer.contains( ii ) )
+	    {
+		List<Cluster> listClus2D = m_mapLayerToListClusters.get( ii );
+		listClus2D.add( clus2D );
+	    }
+	else
+	    {
+		List<Cluster> listClus2D = new ArrayList<Cluster>();
+		listClus2D.add( clus2D );
+		m_mapLayerToListClusters.put( ii , listClus2D );
+	    }
+    }
+
+    public int getSize()
+    {
+	return m_listClusters.size();
+    }
+
+    public double getEnergy()
+    {
+	double e = 0.;
+	for( Cluster clus : m_listClusters ) e += clus.getEnergy();
+	return e;
+    }
+
+
+    public Hep3Vector getDirection() { return m_direction; }
+    public void setDirection( Hep3Vector v ) { m_direction = v; }
+
+    public Hep3Vector getPosition() { return m_position; }
+    public void setPosition( Hep3Vector v ) { m_position = v; }
+
+    public double getMomentum() { return m_mother.getMomentum(); }
+
+    public boolean isCharged() { return m_mother.isCharged(); }
+
+    public boolean isMip() { return m_isMip; }
+    public void setIsMip( boolean b ) {	m_isMip = b; }
+
+    public boolean isSeed() { return m_isSeed; }
+    public void setIsSeed( boolean b ) { m_isSeed = b; }
+
+    public boolean isPositivePole() { return m_isPositivePole; }
+    public void setIsPositivePole( boolean b ) { m_isPositivePole = b; }
+
+    public boolean isNegativePole() { return m_isNegativePole; }
+    public void setIsNegativePole( boolean b ) { m_isNegativePole = b; }
+
+    public boolean isBlock() { return m_isBlock; }
+    public void setIsBlock( boolean b ) { m_isBlock = b; }
+
+    public int getId() { return m_id; }
+    public void setId( int i ) { m_id = i; }
+
+    public Shower2D getMother(){ return m_mother; }
+    public void setMother(Shower2D mother){ m_mother = mother; }
+
+}
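
Note on addCluster above: m_position always tracks the last-added cluster, and m_direction is rebuilt as the sum of unit step vectors between consecutive cluster positions (the first cluster contributes its own unit position vector), scaled by the cluster count, so only its direction is meaningful and callers should normalize it. Also, setLayerAndCluster could test m_mapLayerToListClusters.containsKey(ii) directly instead of copying the key set into a list. A short usage sketch (c0 and c1 are org.lcsim.event.Cluster instances ordered along the branch):

    ShowerBranch2D branch = new ShowerBranch2D();
    branch.addCluster(c0);
    branch.addCluster(c1);
    Hep3Vector dir = VecOp.unit(branch.getDirection()); // normalize before use

    branch.setLayerAndCluster(3, c1);                   // index c1 under layer 3
    List<Cluster> onLayer3 = branch.getListClusterOnLayer(3);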

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
DebugParticles.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- DebugParticles.java	23 Oct 2011 09:50:30 -0000	1.1
+++ DebugParticles.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -5,7 +5,7 @@
 import org.lcsim.event.*;
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.util.hitmap.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
DebugPhotonDriver.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- DebugPhotonDriver.java	23 Oct 2011 09:50:30 -0000	1.1
+++ DebugPhotonDriver.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -1,6 +1,6 @@
 package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.debug;
 
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
 import java.util.*; 
 import java.io.IOException; 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
DebugRegionalEoverP.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- DebugRegionalEoverP.java	23 Oct 2011 09:50:30 -0000	1.1
+++ DebugRegionalEoverP.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -4,11 +4,13 @@
 import hep.physics.vec.*;
 import hep.physics.particle.properties.*;
 import org.lcsim.event.*;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
 import org.lcsim.recon.cluster.util.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 public class DebugRegionalEoverP {
 
@@ -75,7 +77,9 @@
 	    tree.addBranch("energyFromMomentumTrueType", "D");
 	    tree.addBranch("energyFromMomentumUncertainty", "D");
 	    tree.addBranch("energyFromCharged", "D");
+	    tree.addBranch("energyFromChargedInEcal", "D");
 	    tree.addBranch("energyFromNeutral", "D");
+	    tree.addBranch("energyFromNeutralInEcal", "D");
 	    tree.addBranch("energyShared", "D");
 	    tree.addBranch("energyTotal", "D");
 	    tree.addBranch("energyFromTrueNeutral", "D");
@@ -113,6 +117,7 @@
 
 	int eventUID = m_bookKeeper.getEvent().hashCode();
 
+	boolean isEventOK = true;
 	for(Set<Shower> showerGroup : showerGroups){
 
 	    int showerGroupSize = showerGroup.size();
@@ -125,7 +130,9 @@
 	    double energyFromTrueMomentum = 0;
 	    double energyFromMomentumTrueType = 0;
 	    Set<Cluster> clustersFromCharged = new HashSet<Cluster>();
+	    Set<Cluster> clustersFromChargedInEcal = new HashSet<Cluster>();
 	    Set<Cluster> clustersFromNeutral = new HashSet<Cluster>();
+	    Set<Cluster> clustersFromNeutralInEcal = new HashSet<Cluster>();
 	    Set<Cluster> clustersShared = new HashSet<Cluster>();
 	    Set<Cluster> clustersAll = new HashSet<Cluster>();
 	    Set<Track> tracksAll = new HashSet<Track>();
@@ -143,9 +150,31 @@
 		    energyFromMomentumTrueType = Math.sqrt(momentum*momentum + trueMass*trueMass);
 		    clustersFromCharged.addAll(shower.getShowerComponents());
 		    tracksAll.addAll(shower.getTracks());
+		    for(Cluster clus : shower.getShowerComponents()){
+			for(CalorimeterHit hit : clus.getCalorimeterHits()){
+			    String detName = hit.getSubdetector().getName();
+			    CalorimeterInformation ci = CalorimeterInformation.instance();
+			    if(detName.equals(ci.getName(CalorimeterType.EM_BARREL)) || 
+			       detName.equals(ci.getName(CalorimeterType.EM_ENDCAP)) ){
+				clustersFromChargedInEcal.add(clus);
+			    }
+			    break;
+			}
+		    }
 		}
 		if(shower.isNeutral()){
 		    clustersFromNeutral.addAll(shower.getShowerComponents());
+		    for(Cluster clus : shower.getShowerComponents()){
+			for(CalorimeterHit hit : clus.getCalorimeterHits()){
+			    String detName = hit.getSubdetector().getName();
+			    CalorimeterInformation ci = CalorimeterInformation.instance();
+			    if(detName.equals(ci.getName(CalorimeterType.EM_BARREL)) || 
+			       detName.equals(ci.getName(CalorimeterType.EM_ENDCAP)) ){
+				clustersFromNeutralInEcal.add(clus);
+			    }
+			    break;
+			}
+		    }
 		}
 	    }
 	    double energyFromMomentumUncertainty = PFAUtil.estimatedEnergyUncertainty(tracksAll, m_bookKeeper.getEvent());
@@ -154,7 +183,9 @@
 	    clustersAll.addAll(clustersFromCharged);
 	    clustersAll.addAll(clustersFromNeutral);
 	    double energyFromCharged = PFAUtil.energy(clustersFromCharged, allSharedClusters, calib);
+	    double energyFromChargedInEcal = PFAUtil.energy(clustersFromChargedInEcal, allSharedClusters, calib);
 	    double energyFromNeutral = PFAUtil.energy(clustersFromNeutral, allSharedClusters, calib);
+	    double energyFromNeutralInEcal = PFAUtil.energy(clustersFromNeutralInEcal, allSharedClusters, calib);
 	    double energyShared = PFAUtil.energy(clustersShared, allSharedClusters, calib);
 	    double energyTotal = PFAUtil.energy(clustersAll, allSharedClusters, calib);
 
@@ -228,6 +259,10 @@
 		}
 	    }
 
+	    if(Math.abs(energyFromCharged - energyFromMomentum) / energyFromMomentumUncertainty > 3){
+		isEventOK = false;
+	    }
+
 	    tree.setBranchValue("eventUID", eventUID);
 	    tree.setBranchValue("showerGroupSize", showerGroupSize);
 	    tree.setBranchValue("showerGroupNCharged", showerGroupNCharged);
@@ -236,7 +271,9 @@
 	    tree.setBranchValue("energyFromMomentumTrueType", energyFromMomentumTrueType);
 	    tree.setBranchValue("energyFromMomentumUncertainty", energyFromMomentumUncertainty);
 	    tree.setBranchValue("energyFromCharged", energyFromCharged);
+	    tree.setBranchValue("energyFromChargedInEcal", energyFromChargedInEcal);
 	    tree.setBranchValue("energyFromNeutral", energyFromNeutral);
+	    tree.setBranchValue("energyFromNeutralInEcal", energyFromNeutralInEcal);
 	    tree.setBranchValue("energyShared", energyShared);
 	    tree.setBranchValue("energyTotal", energyTotal);
 	    tree.setBranchValue("energyFromTrueNeutral", energyFromTrueNeutral);
@@ -252,6 +289,8 @@
 
 	    tree.fill();
 	}
+
+	m_bookKeeper.getEvent().put("isEventOK_"+treeName, new Boolean(isEventOK));
     }
 
     protected Map<Shower, Set<Shower>> groupNearbyShowers(ShowerContainer showerContainer, double angleForGrouping){
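
Two details of this change are worth spelling out. First, the new isEventOK flag marks the event as bad whenever a shower group's charged energy differs from its track momentum by more than three times the estimated uncertainty; the flag is stored on the event under "isEventOK_"+treeName. Second, the new InEcal sums classify a cluster by its first hit only (note the break inside the hit loop). Factored out as a helper, that membership test reads (the helper name is ours):

    // True if the cluster's first hit lies in the ECAL barrel or endcap.
    static boolean firstHitInEcal(Cluster clus)
    {
        CalorimeterInformation ci = CalorimeterInformation.instance();
        for (CalorimeterHit hit : clus.getCalorimeterHits())
        {
            String detName = hit.getSubdetector().getName();
            return detName.equals(ci.getName(CalorimeterType.EM_BARREL))
                || detName.equals(ci.getName(CalorimeterType.EM_ENDCAP));
        }
        return false; // cluster has no hits
    }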

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
DebugShowersDriver.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- DebugShowersDriver.java	23 Oct 2011 09:50:30 -0000	1.1
+++ DebugShowersDriver.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -15,7 +15,8 @@
 import org.lcsim.recon.cluster.mipfinder.*;
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.clumpfinder.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.mc.fast.tracking.ReconTrack;
 import org.lcsim.event.base.*;
 import hep.physics.particle.Particle;
@@ -23,17 +24,22 @@
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;
-import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
 import org.lcsim.util.aida.*;
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.geometry.*;
 import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
+import org.lcsim.recon.cluster.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.clumpfinder.kmean.*;
 
 
 public class DebugShowersDriver extends Driver {
 
+    protected DebugUtils m_debugUtils;
+    protected LinkQualityChecker m_LQChecker;
+
     protected EventHeader m_event = null;
     protected DebugUtils m_hitBasedDebugUtils = null;
     protected DebugUtils m_energyBasedDebugUtils = null;
@@ -95,6 +101,10 @@
 
     //
 
+    protected IHistogram1D m_h_score , m_h_score_energyWeighted;
+
+    //
+
     protected IHistogram2D m_h3_bin0_showerNumClusVsPurity_photon;
     protected IHistogram2D m_h3_bin0_showerPosition_photon       , m_h3_bin0_position_photon;
     protected IHistogram2D m_h3_bin0_showerAngleDistance_photon  , m_h3_bin0_angleDistance_photon;
@@ -177,6 +187,10 @@
     /////////
     TTree m_showerDebugTree;
     TTree m_showerLinksTree;
+    TTree m_clusterDebugTree;
+    TTree m_layerDebugTree;
+    TTree m_shower2DDebugTree;
+    TTree m_clus2DDebugTree;
     /////////
 
     public DebugShowersDriver( PFABookKeepingBroker bookKeeper, HelixExtrapolator extrapolator ){
@@ -189,6 +203,14 @@
 
     public DebugShowersDriver( PFABookKeepingBroker bookKeeper, HelixExtrapolator extrapolator , String mcListName, String EcalDigiHitMapName, String HcalDigiHitMapName){
 
+        m_debugUtils = new DebugUtils();
+        m_debugUtils.setMCListName( mcListName );
+        m_debugUtils.setEcalDigiHitMapName( EcalDigiHitMapName );
+        m_debugUtils.setHcalDigiHitMapName( HcalDigiHitMapName );
+        m_debugUtils.setEnergyBased( true );
+
+        m_LQChecker = new DominantParticleBasedLQChecker( m_debugUtils );
+
 	m_hitBasedDebugUtils = new DebugUtils();
 	m_hitBasedDebugUtils.setMCListName(mcListName);
 	m_hitBasedDebugUtils.setEcalDigiHitMapName(EcalDigiHitMapName);
@@ -390,6 +412,10 @@
 	    m_h_combinedEResidual_angle = m_histoFactory.createHistogram2D("combinedEResidual_angle", 100, 0, Math.PI, 100, -10, 10);
 
 
+	    m_h_score = m_histoFactory.createHistogram1D( "score" , 200 , 0.6 , 1.1 );
+	    m_h_score_energyWeighted = m_histoFactory.createHistogram1D( "score_energyWeighted" , 200 , 0.6 , 1.1 );
+
+
 	    for(int iShowerType=0; iShowerType<m_nShowerTypes; iShowerType++){
 		String showerType = m_showerTypes[iShowerType];
 		m_tree.mkdir(showerType);	
@@ -447,7 +473,6 @@
         }
 
 	m_showerDebugTree = new TTree(m_outputFileName+".tree");
-
 	m_showerDebugTree.addBranch("size", "I");
 	m_showerDebugTree.addBranch("isRecoNeutral", "B");
 	m_showerDebugTree.addBranch("energy", "D");
@@ -512,7 +537,6 @@
 	m_showerDebugTree.addBranch( "primaryLikelihood" , "D" );
 
 	m_showerLinksTree = new TTree(m_outputFileName+".links.tree");
-
 	m_showerLinksTree.addBranch("eventUID", "I");
 	m_showerLinksTree.addBranch("baseID", "I");
 	m_showerLinksTree.addBranch("baseSize", "I");
@@ -561,161 +585,1820 @@
 	m_showerLinksTree.addBranch("baseToTargetLikelihoodNeutral", "D");
 	m_showerLinksTree.addBranch( "baseCorePurity" , "D" );
 	m_showerLinksTree.addBranch( "targetCorePurity" , "D" );
+
+	m_clusterDebugTree = new TTree(m_outputFileName+".clusterDebug.tree");
+	m_clusterDebugTree.addBranch( "clusLayerPos" , "I" );
+	m_clusterDebugTree.addBranch( "clusLayerNeg" , "I" );
+	m_clusterDebugTree.addBranch( "clusDistPos"  , "D" );
+	m_clusterDebugTree.addBranch( "clusDistNeg"  , "D" );
+	m_clusterDebugTree.addBranch( "clusPosX"     , "D" );
+	m_clusterDebugTree.addBranch( "clusPosY"     , "D" );
+	m_clusterDebugTree.addBranch( "clusPosZ"     , "D" );
+	m_clusterDebugTree.addBranch( "clusNegX"     , "D" );
+	m_clusterDebugTree.addBranch( "clusNegY"     , "D" );
+	m_clusterDebugTree.addBranch( "clusNegZ"     , "D" );
+	m_clusterDebugTree.addBranch( "clusCenterX"  , "D" );
+	m_clusterDebugTree.addBranch( "clusCenterY"  , "D" );
+	m_clusterDebugTree.addBranch( "clusCenterZ"  , "D" );
+	m_clusterDebugTree.addBranch( "clusEnergy"   , "D" );
+	m_clusterDebugTree.addBranch( "clusPhoton"   , "B" );
+	m_clusterDebugTree.addBranch( "clusGood"     , "B" );
+	m_clusterDebugTree.addBranch( "clusReco"     , "B" );
+
+	m_layerDebugTree = new TTree( m_outputFileName + ".layerDebug.tree" );
+	m_layerDebugTree.addBranch( "hitLayer" , "I" );
+	m_layerDebugTree.addBranch( "hitTrueHit" , "I" );
+	m_layerDebugTree.addBranch( "hitTrueClus" , "I" );
+	m_layerDebugTree.addBranch( "hitReco" , "I" );
+	m_layerDebugTree.addBranch( "hitRecoShower" , "I" );
+	m_layerDebugTree.addBranch( "hitClusIsPhoton" , "B" );
+	//m_layerDebugTree.addBranch( "hitRawEnergy" , "D" );
+	m_layerDebugTree.addBranch( "hitCorrectedEnergy" , "D" );
+	//m_layerDebugTree.addBranch( "hitTime" , "D" );
+	m_layerDebugTree.addBranch( "hitX" , "D" );
+	m_layerDebugTree.addBranch( "hitY" , "D" );
+	m_layerDebugTree.addBranch( "hitZ" , "D" );
+	m_layerDebugTree.addBranch( "hitSeed" , "B" );
+	m_layerDebugTree.addBranch( "hitMip" , "B" );
+
+	m_shower2DDebugTree = new TTree( m_outputFileName + ".shower2DDebug.tree" );
+	m_shower2DDebugTree.addBranch( "shower2DPurity" , "D" );
+	m_shower2DDebugTree.addBranch( "shower2DEfficiency" , "D" );
+
+	m_clus2DDebugTree = new TTree( m_outputFileName + ".clus2DDebug.tree" );
+	m_clus2DDebugTree.addBranch( "clus2DNumberOfHits" , "I" );
+	m_clus2DDebugTree.addBranch( "clus2DLayerNumber" , "I" );
+	m_clus2DDebugTree.addBranch( "clus2DPurity" , "D" );
+	m_clus2DDebugTree.addBranch( "clus2DEnergy" , "D" );
+	m_clus2DDebugTree.addBranch( "clus2DIsPhoton" , "B" );
+	m_clus2DDebugTree.addBranch( "clus2DIsMip" , "B" );
+	m_clus2DDebugTree.addBranch( "clus2DShowerTruthID" , "I" );
+	m_clus2DDebugTree.addBranch( "clus2DShowerID" , "I" );
+	m_clus2DDebugTree.addBranch( "clus2DBranchID" , "I" );
+	m_clus2DDebugTree.addBranch( "clus2DX" , "D" );
+	m_clus2DDebugTree.addBranch( "clus2DY" , "D" );
+	m_clus2DDebugTree.addBranch( "clus2DZ" , "D" );
+
+    }
+
+    public void process(EventHeader event){
+	init();
+
+	m_event = event;
+
+	m_debugUtils.setEventInfo(event);
+	m_hitBasedDebugUtils.setEventInfo(event);
+	m_energyBasedDebugUtils.setEventInfo(event);
+
+	List<StructuralLikelihoodQuantity> vQuantity = m_showerToShowerEval.getLikelihoodQuantities() ;
+
+	for(StructuralLikelihoodQuantity iQuantity : vQuantity) {
+	    if(!(iQuantity instanceof ShowerToShowerLikelihoodQuantity)){
+		throw new AssertionError("Quantity "+iQuantity.getClass().getName()+" does not inherit from ShowerToShowerLikelihoodQuantity");
+	    }
+	    ShowerToShowerLikelihoodQuantity iShowerQuantity = (ShowerToShowerLikelihoodQuantity)iQuantity;
+	    iShowerQuantity.setEventInfo(m_event);
+	    iShowerQuantity.setBookKeepingBroker(m_bookKeeper);
+	}
+	
+	vQuantity = m_primaryShowerEval.getLikelihoodQuantities() ;
+
+	for(StructuralLikelihoodQuantity iQuantity : vQuantity) {
+	    if(!(iQuantity instanceof ShowerToShowerLikelihoodQuantity)){
+		throw new AssertionError("Quantity "+iQuantity.getClass().getName()+" does not inherit from ShowerToShowerLikelihoodQuantity");
+	    }
+	    ShowerToShowerLikelihoodQuantity iShowerQuantity = (ShowerToShowerLikelihoodQuantity)iQuantity;
+	    iShowerQuantity.setEventInfo(m_event);
+	    iShowerQuantity.setBookKeepingBroker(m_bookKeeper);
+	}
+
+	super.process(m_event);
+
+	m_eventCount++;
+	if (m_doCheckpoints && m_eventCount % 50 == 0) {
+	    // Checkpoint
+	    commit();
+	}
     }
 
-    public void process(EventHeader event){
-	init();
+    public void doAnalysis(ShowerContainer showerContainer,
+			   Collection<Cluster> clusters,
+			   List<SharedClusterGroup> allSharedClusters) {
+
+	Variables vars = new Variables();
+	Set<Shower> showers = showerContainer.getShowers();
+	for (Shower shower : showers) {
+	    if( shower.isNeutral() )
+		{
+		    continue;
+		}
+	    Set<Track> tracks = shower.getTracks();
+	    Set<Cluster> showerComponents = shower.getShowerComponents();
+	    vars.showerSize = shower.size();
+	    vars.clusterEnergy = shower.realEnergy();
+	    vars.trackMomentum = shower.scalarMomentum();
+	    double sigma = shower.estimatedEnergyUncertainty();
+	    vars.normalizedResidual = (vars.clusterEnergy-vars.trackMomentum)/sigma;
+	    vars.hitBasedEfficiency = m_hitBasedDebugUtils.quoteEfficiency_T(tracks, showerComponents, allSharedClusters);
+	    vars.hitBasedPurity = m_hitBasedDebugUtils.quotePurity_T(tracks, showerComponents, allSharedClusters);
+	    vars.hitBasedCoreEfficiency = m_hitBasedDebugUtils.quoteEfficiency_T(tracks, showerComponents);
+	    vars.hitBasedCorePurity = m_hitBasedDebugUtils.quotePurity_T(tracks, showerComponents);
+	    vars.energyBasedEfficiency = m_energyBasedDebugUtils.quoteEfficiency_T(tracks, showerComponents, allSharedClusters);
+	    vars.energyBasedPurity = m_energyBasedDebugUtils.quotePurity_T(tracks, showerComponents, allSharedClusters);
+	    vars.energyBasedCoreEfficiency = m_energyBasedDebugUtils.quoteEfficiency_T(tracks, showerComponents);
+	    vars.energyBasedCorePurity = m_energyBasedDebugUtils.quotePurity_T(tracks, showerComponents);
+	    boolean isPunchThrough = false;
+	    if(m_checkSharedHitsForPunchThrough){
+		isPunchThrough = m_punchThroughCheck.isPunchThrough(shower, m_event, allSharedClusters);
+	    }else{
+		isPunchThrough = m_punchThroughCheck.isPunchThrough(shower, m_event);
+	    }
+	    if(shower.isJet()) {
+		if(isPunchThrough){
+		    fillHistograms("PunchThroughJets", vars);
+		}else{
+		    fillHistograms("Jets", vars);
+		}
+	    }else{
+		if(isPunchThrough){
+		    fillHistograms("PunchThroughTracks", vars);
+		}else{
+		    fillHistograms("Tracks", vars);
+		}
+	    }
+	}
+
+	List<Shower> showerList = new Vector<Shower>();
+	showerList.addAll(showerContainer.getShowers());
+	for(int i=0; i<showerList.size(); i++){
+	    Shower shower1 = showerList.get(i);
+	    if(shower1.isNeutral()) continue;
+	    Hep3Vector p1 = shower1.momentum();
+	    double energy1 = shower1.realEnergy();
+	    double sigma1 = shower1.estimatedEnergyUncertainty();
+	    Set<Cluster> clusters1 = shower1.getShowerComponents();
+	    ClusterEnergyCalculator calib = shower1.getEnergyCalculator();
+	    for(int j=i+1; j<showerList.size(); j++){
+		Shower shower2 = showerList.get(j);
+		if(shower2.isNeutral()) continue;
+		Hep3Vector p2 = shower2.momentum();
+		double energy2 = shower2.realEnergy();
+		double sigma2 = shower2.estimatedEnergyUncertainty();
+		Set<Cluster> clusters2 = shower2.getShowerComponents();
+
+		Set<Cluster> combinedShowerComponents = new HashSet<Cluster>();
+		combinedShowerComponents.addAll(clusters1);
+		combinedShowerComponents.addAll(clusters2);
+		Shower combinedShower = ShowerFactory.createShower(calib, allSharedClusters, combinedShowerComponents);
+
+		double angle = Math.acos(VecOp.dot(VecOp.unit(p1), VecOp.unit(p2)));
+		double sumE = energy1 + energy2;
+		double combinedE = combinedShower.realEnergy();
+		double commonE = sumE - combinedE;
+		double combinedP = VecOp.add(p1, p2).magnitude();
+		double combinedSigma = Math.sqrt(sigma1*sigma1 + sigma2*sigma2);
+
+		if(sumE == 0){ throw new AssertionError("Energy sum is zero!!!"); }
+		if(combinedP == 0){ throw new AssertionError("Combined momentum is zero!!!"); }
+		if(combinedSigma == 0){ throw new AssertionError("Combined sigma is zero!!!"); }
+
+		m_h_sharedEnergyFranction_angle.fill(angle, commonE/combinedE);
+		m_h_sumEOverP_angle.fill(angle, sumE/combinedP);
+		m_h_combinedEOverP_angle.fill(angle, combinedE/combinedP);
+		m_h_sumEResidual_angle.fill(angle, (sumE - combinedP)/combinedSigma);
+		m_h_combinedEResidual_angle.fill(angle, (combinedE - combinedP)/combinedSigma);
+	    }
+	}
+
+	Vector<Hep3Vector> trackDirection = new Vector<Hep3Vector>();
+	Vector<Hep3Vector> trackPosition = new Vector<Hep3Vector>();
+	
+	for( Shower shower : showerContainer.getShowers() )
+	    {
+		if( shower.isNeutral() ) continue;
+		
+		Track track = null;
+		Cluster seed = null;
+		Set<Track> tracks = shower.getTracks();
+		for( Track trk : tracks )
+		    {
+			track = trk;
+			seed = shower.getSeed( track );
+			break;
+		    }
+
+		Hep3Vector [] directionPosition = PFAUtil.getTrackExtrapolation( track , seed , 0 , m_extrapolator );
+		
+		Hep3Vector directionTrack = directionPosition[0];
+		Hep3Vector positionTrack = directionPosition[1];
+		
+		trackDirection.add( directionTrack );
+		trackPosition.add( positionTrack );		
+	    }
+
+
+
+	/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+	/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+	/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+
+	// 2D : gammaphi
+
+	m_layerDebugTree.setBranchValue( "hitLayer" , 1111 );
+	m_layerDebugTree.setBranchValue( "hitTrueHit" , 0 );
+	m_layerDebugTree.setBranchValue( "hitTrueClus" , 0 );
+	m_layerDebugTree.setBranchValue( "hitReco" , 0 );
+	m_layerDebugTree.setBranchValue( "hitRecoShower" , 0 );
+	m_layerDebugTree.setBranchValue( "hitClusIsPhoton" , false );
+	//m_layerDebugTree.setBranchValue( "hitRawEnergy" , 0. );					
+	m_layerDebugTree.setBranchValue( "hitCorrectedEnergy" , 0. );
+	//m_layerDebugTree.setBranchValue( "hitTime" , 0. );
+	m_layerDebugTree.setBranchValue( "hitX" , 0. );
+	m_layerDebugTree.setBranchValue( "hitY" , 0. );
+	m_layerDebugTree.setBranchValue( "hitZ" , 0. );
+	m_layerDebugTree.setBranchValue( "hitSeed" , false );
+	m_layerDebugTree.setBranchValue( "hitMip" , false );
+	m_layerDebugTree.fill();
+
+	m_clus2DDebugTree.setBranchValue( "clus2DNumberOfHits" , 0 );					
+	m_clus2DDebugTree.setBranchValue( "clus2DLayerNumber" , 1111 );					
+	m_clus2DDebugTree.setBranchValue( "clus2DPurity" , 0. );					
+	m_clus2DDebugTree.setBranchValue( "clus2DEnergy" , 0. );
+	m_clus2DDebugTree.setBranchValue( "clus2DIsPhoton" , false );					
+	m_clus2DDebugTree.setBranchValue( "clus2DIsMip" , false );
+	m_clus2DDebugTree.setBranchValue( "clus2DShowerTruthID" , 0 );
+	m_clus2DDebugTree.setBranchValue( "clus2DShowerID" , 0 );
+	m_clus2DDebugTree.setBranchValue( "clus2DBranchID" , 0 );
+	m_clus2DDebugTree.setBranchValue( "clus2DX" , 0. );
+	m_clus2DDebugTree.setBranchValue( "clus2DY" , 0. );
+	m_clus2DDebugTree.setBranchValue( "clus2DZ" , 0. );
+	m_clus2DDebugTree.fill();
+
+
+	/*
+	for( Shower shower : showerContainer.getShowers() )
+	    {
+		if( shower.getTracks() == null ) continue;
+
+		for( Track trk : shower.getTracks() )
+		    {
+			for( int ilayer = 0 ; ilayer < 31 ; ilayer++ )
+			    {
+				if( trk instanceof MultipleTrackTrack ) // FixMe!!! in case the directions of the tracks are very different.
+				    {
+					for( Track subTrack : trk.getTracks() )
+					    {
+						HelixExtrapolationResult result = m_extrapolator.performExtrapolation( subTrack );
+						if( result == null ) { System.out.println( "> + > + >   Extrapolation of the subTrack failed." ); continue; }
+						
+						Hep3Vector positionOfTrack = result.extendToECALEndcapLayer( ilayer );
+						if( positionOfTrack == null ) positionOfTrack = result.extendToECALBarrelLayer( ilayer );
+						
+						if( positionOfTrack == null ) { System.out.println( "> + > + >   Extrapolation of the subTrack failed 2." ); continue; }
+						
+						m_clus2DDebugTree.setBranchValue( "clus2DNumberOfHits" , 0 );					
+						m_clus2DDebugTree.setBranchValue( "clus2DLayerNumber" , ilayer );					
+						m_clus2DDebugTree.setBranchValue( "clus2DPurity" , 0. );					
+						m_clus2DDebugTree.setBranchValue( "clus2DEnergy" , new BasicHep3Vector( subTrack.getMomentum() ).magnitude() );
+						m_clus2DDebugTree.setBranchValue( "clus2DIsPhoton" , false );					
+						m_clus2DDebugTree.setBranchValue( "clus2DIsMip" , true );
+						m_clus2DDebugTree.setBranchValue( "clus2DShowerTruthID" , 0 );
+						m_clus2DDebugTree.setBranchValue( "clus2DShowerID" , 0 );
+						m_clus2DDebugTree.setBranchValue( "clus2DBranchID" , 0 );
+						m_clus2DDebugTree.setBranchValue( "clus2DX" , positionOfTrack.x() );
+						m_clus2DDebugTree.setBranchValue( "clus2DY" , positionOfTrack.y() );
+						m_clus2DDebugTree.setBranchValue( "clus2DZ" , positionOfTrack.z() );
+						m_clus2DDebugTree.fill();
+
+					    }			
+					
+				    }
+				else
+				    {
+					HelixExtrapolationResult result = m_extrapolator.performExtrapolation( trk );
+					if( result == null ) { System.out.println( "> + > + >   Extrapolation of the subTrack failed." ); continue; }
+					
+					Hep3Vector positionOfTrack = result.extendToECALEndcapLayer( ilayer );
+					if( positionOfTrack == null ) positionOfTrack = result.extendToECALBarrelLayer( ilayer );
+					
+					if( positionOfTrack == null ) { System.out.println( "> + > + >   Extrapolation of the subTrack failed 2." ); continue; }
+					
+					m_clus2DDebugTree.setBranchValue( "clus2DNumberOfHits" , 0 );					
+					m_clus2DDebugTree.setBranchValue( "clus2DLayerNumber" , ilayer );					
+					m_clus2DDebugTree.setBranchValue( "clus2DPurity" , 0. );					
+					m_clus2DDebugTree.setBranchValue( "clus2DEnergy" , new BasicHep3Vector( trk.getMomentum() ).magnitude() );
+					m_clus2DDebugTree.setBranchValue( "clus2DIsPhoton" , false );					
+					m_clus2DDebugTree.setBranchValue( "clus2DIsMip" , false );
+					m_clus2DDebugTree.setBranchValue( "clus2DShowerTruthID" , 0 );
+					m_clus2DDebugTree.setBranchValue( "clus2DShowerID" , 0 );
+					m_clus2DDebugTree.setBranchValue( "clus2DBranchID" , 0 );
+					m_clus2DDebugTree.setBranchValue( "clus2DX" , positionOfTrack.x() );
+					m_clus2DDebugTree.setBranchValue( "clus2DY" , positionOfTrack.y() );
+					m_clus2DDebugTree.setBranchValue( "clus2DZ" , positionOfTrack.z() );
+					m_clus2DDebugTree.fill();					
+				    }
+
+			    }
+		    }
+	    }
+	*/
+
+
+	Collection<Cluster> photons = m_bookKeeper.getClusterList( "Photons" );
+	Collection<Cluster> mips = m_bookKeeper.getClusterList( "Mips" );
+	Collection<Cluster> clumps = m_bookKeeper.getClusterList( "Clumps" );
+	Collection<Cluster> blocks = m_bookKeeper.getClusterList( "Blocks" );
+	Collection<Cluster> leftoverHitClusters = m_bookKeeper.getClusterList( "Leftovers" );
+
+	// Map tagging whether each shower has already been used in a group of showers, so that no shower is used more than once.
+	Map<Shower,Boolean> mapShowerToIsUsed = new HashMap<Shower,Boolean>();
+        for( Shower shower : showerContainer.getShowers() ) mapShowerToIsUsed.put( shower , new Boolean(false) );
+
+	// This is the grouping angle of the shower
+	double angleForGrouping = 60. * Math.PI / 180.;
+	Map<Shower,Set<Shower>> mapShowerToSetShowers = groupShowers( showerContainer , angleForGrouping );
+
+
+	// begin check /////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+	Map<CalorimeterHit,Integer> mapHitToRecoShower2D = new HashMap<CalorimeterHit,Integer>();
+	int iRecoShower2D = 0;
+
+        for( Shower shower : showerContainer.getShowers() )
+	    {
+		// check if the shower is in the shower container
+		List<Shower> tempListShower = new ArrayList<Shower>( mapShowerToSetShowers.get( shower ) );
+		if( !tempListShower.contains ( shower ) ) System.out.println( "Error!!! In DebugShowersDriver.java No. 348234234" );
+
+		//
+
+		// shower purity
+		Cluster showerCore_ = PFAUtil.makeCombinedCluster( shower.getShowerComponents() );
+		double shower2DPurity = m_debugUtils.quotePurity( showerCore_ );
+			
+		// shower efficiency
+		MCParticle domPart1 = m_debugUtils.quoteDominantParticle( showerCore_ );
+		double shower2DEfficiency = m_debugUtils.quoteEfficiency_P( domPart1 , showerCore_ );
+
+    		m_shower2DDebugTree.setBranchValue( "shower2DPurity" , shower2DPurity );
+    		m_shower2DDebugTree.setBranchValue( "shower2DEfficiency" , shower2DEfficiency );
+		//m_shower2DDebugTree.fill();
+
+		//
+
+		// fill mapHitToRecoShower2D
+		iRecoShower2D++;
+		for( Cluster clus : shower.getShowerComponents() )
+		    for( CalorimeterHit hit : clus.getCalorimeterHits() )
+			mapHitToRecoShower2D.put( hit , new Integer( iRecoShower2D ) );
+	    }
+
+	// end check ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+	// Definition of the K-Mean cluster finder
+	KMeanClumpFinder clus2DFinder = new KMeanClumpFinder();
+	
+	// Seed finding 
+	int SDT333 = 0;
+	int SDT553 = 0;
+	int SST = 1;
+	
+	// Clump finding
+	int CDT333 = 0;
+	int CDT553 = 0;
+	int CST = 1;
+	int maxItr = 1;
+	
+	KMeanParameters kMeanParameters = new KMeanParameters( SDT333 , SDT553 , SST , CDT333 , CDT553 , CST , maxItr );
+	clus2DFinder.setParameters( kMeanParameters );
+
+	// Number of the calorimeter layers
+	int totalNumberOfLayers = 71;
+
+
+
+	////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+	////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+	////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+
+	// For loop on the group of showers
+        for( Shower showerMother : showerContainer.getShowers() )
+            {
+		// skip showers that have already been grouped
+		boolean isUsed = mapShowerToIsUsed.get( showerMother ).booleanValue();
+		if( isUsed ) continue;
+
+		System.out.println( ">>>>>>>>>>>>>>> new group of showers" );
+
+		// assign an integer tag to each MC particle
+		Map<MCParticle,Integer> mapMCtoInt = new HashMap<MCParticle,Integer>();
+		int iMCtoInt = 0;
+
+		// shower id
+		int iShowerId = 1;
+		int iBranchId = 1;
+
+		// seed mip clump 3D ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+		List<Cluster> listSeedMip3D = new ArrayList<Cluster>();
+		List<Cluster> listSeedClump3D = new ArrayList<Cluster>();
+		List<Cluster> listNoSeedMip3D = new ArrayList<Cluster>();
+		List<Cluster> listNoSeedClump3D = new ArrayList<Cluster>();
+		for( Shower shower : mapShowerToSetShowers.get( showerMother ) )
+		    for( Cluster clus : shower.getSeeds() )
+			{
+			    //if( !listSeedMip3D.contains( clus ) && mips.contains( clus ) ) listSeedMip3D.add( clus );
+			    //if( !listSeedClump3D.contains( clus ) && ( clumps.contains( clus ) || ( blocks.contains( clus ) ) ) ) listSeedClump3D.add( clus );
+			}
+		for( Shower shower : mapShowerToSetShowers.get( showerMother ) )
+		    for( Cluster clus : shower.getShowerComponents() )
+			{
+			    //if( !listNoSeedMip3D.contains( clus ) && !listSeedMip3D.contains(clus) && mips.contains( clus ) ) listNoSeedMip3D.add( clus );
+			    //if( !listNoSeedClump3D.contains( clus ) && !listSeedClump3D.contains( clus ) && ( clumps.contains( clus ) || blocks.contains( clus ) ) ) listNoSeedClump3D.add( clus );
+			    if( !listNoSeedClump3D.contains( clus ) ) listNoSeedClump3D.add( clus );
+			}
+
+		/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+		// map layer to seed mip clump 3D ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+		Map<Integer,Set<CalorimeterHit>> mapLayerToSetSeedMip3DHits = new HashMap<Integer,Set<CalorimeterHit>>();
+		Map<Integer,Set<CalorimeterHit>> mapLayerToSetSeedClump3DHits = new HashMap<Integer,Set<CalorimeterHit>>();
+		Map<Integer,Set<CalorimeterHit>> mapLayerToSetNoSeedMip3DHits = new HashMap<Integer,Set<CalorimeterHit>>();
+		Map<Integer,Set<CalorimeterHit>> mapLayerToSetNoSeedClump3DHits = new HashMap<Integer,Set<CalorimeterHit>>();
+		for( int i = 0 ; i < totalNumberOfLayers ; i++ )
+		    {
+			mapLayerToSetSeedMip3DHits.put( new Integer( i ) , new HashSet<CalorimeterHit>() );
+			mapLayerToSetSeedClump3DHits.put( new Integer( i ) , new HashSet<CalorimeterHit>() );
+			mapLayerToSetNoSeedMip3DHits.put( new Integer( i ) , new HashSet<CalorimeterHit>() );
+			mapLayerToSetNoSeedClump3DHits.put( new Integer( i ) , new HashSet<CalorimeterHit>() );
+		    }
+		/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+		// list of used clusters
+		List<Cluster> usedClusters = new Vector<Cluster>();
+
+		for( Shower shower : mapShowerToSetShowers.get( showerMother ) )
+		    {
+			isUsed = mapShowerToIsUsed.get( shower ).booleanValue();
+			if( isUsed ) continue;
+
+			for( Cluster clus : shower.getShowerComponents() )
+			    {
+				//if( !mips.contains( clus ) && !clumps.contains( clus ) && !blocks.contains( clus ) ) continue;
+
+				if( usedClusters.contains( clus ) ) continue;
+				else usedClusters.add( clus );
+
+				List<CalorimeterHit> hits = clus.getCalorimeterHits();
+
+				for( CalorimeterHit hit : hits )
+				    {
+					IDDecoder id = hit.getIDDecoder();
+					id.setID( hit.getCellID() );
+					int layer = id.getLayer();
+					
+					int layerNumber = layer;
+					if( ci.getName( CalorimeterType.EM_BARREL ).equals( hit.getSubdetector().getName() ) )  layerNumber=layer;
+					if( ci.getName( CalorimeterType.EM_ENDCAP ).equals( hit.getSubdetector().getName() ) )  layerNumber=layer;
+					if( ci.getName( CalorimeterType.HAD_BARREL ).equals( hit.getSubdetector().getName() ) ) layerNumber=layer+m_numberOfLayersEMBarrel;
+					if( ci.getName( CalorimeterType.HAD_ENDCAP ).equals( hit.getSubdetector().getName() ) ) layerNumber=layer+m_numberOfLayersEMEndcap;
+					if( ci.getName( CalorimeterType.MUON_BARREL ).equals( hit.getSubdetector().getName() ) )layerNumber=layer+m_numberOfLayersEMBarrel+m_numberOfLayersHadBarrel;
+
+					Integer ii = new Integer( layerNumber );
+
+					Set<CalorimeterHit> tempSetSeedMip3DHits = mapLayerToSetSeedMip3DHits.get( ii );
+					if( listSeedMip3D.contains( clus ) ) tempSetSeedMip3DHits.add( hit );
+					Set<CalorimeterHit> tempSetSeedClump3DHits = mapLayerToSetSeedClump3DHits.get( ii );
+					if( listSeedClump3D.contains( clus ) ) tempSetSeedClump3DHits.add( hit );
+
+					Set<CalorimeterHit> tempSetNoSeedMip3DHits = mapLayerToSetNoSeedMip3DHits.get( ii );
+					if( listNoSeedMip3D.contains( clus ) ) tempSetNoSeedMip3DHits.add( hit );
+					Set<CalorimeterHit> tempSetNoSeedClump3DHits = mapLayerToSetNoSeedClump3DHits.get( ii );
+					if( listNoSeedClump3D.contains( clus ) ) tempSetNoSeedClump3DHits.add( hit );
+
+					//
+
+					MCParticle pHit = getMCParticleOfHit( hit );
+
+					List<MCParticle> listMCHit = new ArrayList<MCParticle>( mapMCtoInt.keySet() );
+						
+					if( !listMCHit.contains( pHit ) )
+					    {
+						iMCtoInt++;
+						mapMCtoInt.put( pHit , new Integer( iMCtoInt ) );
+					    }
+				    }
+			    }
+
+			mapShowerToIsUsed.put( shower , Boolean.TRUE );
+		    }
+
+
+		// map from layer to seed 2D ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+		// map layer to Seed Mip 2D
+		Map<Integer,List<Cluster>> mapLayerToListSeedMip2D = new HashMap<Integer,List<Cluster>>();
+		for( Integer ii : mapLayerToSetSeedMip3DHits.keySet() )
+		    {
+			List<Cluster> listSeedMip2D = new ArrayList<Cluster>();
+			for( Cluster seedMip3D : listSeedMip3D )
+			    {
+				BasicCluster clusSeedMip2D = new BasicCluster();
+				for( CalorimeterHit hit1 : seedMip3D.getCalorimeterHits() )
+				    for( CalorimeterHit hit2 : mapLayerToSetSeedMip3DHits.get( ii ) )
+					if( hit1 == hit2 ) clusSeedMip2D.addHit( hit1 );
+				if( clusSeedMip2D.getCalorimeterHits().size() != 0 ) listSeedMip2D.add( clusSeedMip2D );
+			    }
+			mapLayerToListSeedMip2D.put( ii , listSeedMip2D );
+		    }
+		// map layer to Seed Clump 2D
+		Map<Integer,List<Cluster>> mapLayerToListSeedClump2D = new HashMap<Integer,List<Cluster>>();
+		for( Integer ii : mapLayerToSetSeedClump3DHits.keySet() )
+		    {
+			List<Cluster> listSeedClump2D = new ArrayList<Cluster>();
+			for( Cluster seedClump3D : listSeedClump3D )
+			    {
+				BasicCluster clusSeedClump2D = new BasicCluster();
+				for( CalorimeterHit hit1 : seedClump3D.getCalorimeterHits() )
+				    for( CalorimeterHit hit2 : mapLayerToSetSeedClump3DHits.get( ii ) )
+					if( hit1 == hit2 ) clusSeedClump2D.addHit( hit1 );
+				if( clusSeedClump2D.getCalorimeterHits().size() != 0 ) listSeedClump2D.add( clusSeedClump2D );
+			    }
+			mapLayerToListSeedClump2D.put( ii , listSeedClump2D );
+		    }
+		// map layer to No Seed Mip 2D
+		Map<Integer,List<Cluster>> mapLayerToListNoSeedMip2D = new HashMap<Integer,List<Cluster>>();
+		for( Integer ii : mapLayerToSetNoSeedMip3DHits.keySet() )
+		    {
+			List<Cluster> listNoSeedMip2D = new ArrayList<Cluster>();
+			for( Cluster noSeedMip3D : listNoSeedMip3D )
+			    {
+				BasicCluster clusNoSeedMip2D = new BasicCluster();
+				for( CalorimeterHit hit1 : noSeedMip3D.getCalorimeterHits() )
+				    for( CalorimeterHit hit2 : mapLayerToSetNoSeedMip3DHits.get( ii ) )
+					if( hit1 == hit2 ) clusNoSeedMip2D.addHit( hit1 );
+				if( clusNoSeedMip2D.getCalorimeterHits().size() != 0 ) listNoSeedMip2D.add( clusNoSeedMip2D );
+			    }
+			mapLayerToListNoSeedMip2D.put( ii , listNoSeedMip2D );
+		    }
+		// map layer to No Seed Clump 2D
+		Map<Integer,List<Cluster>> mapLayerToListNoSeedClump2D = new HashMap<Integer,List<Cluster>>();
+		for( int iLayer = 0 ; iLayer < totalNumberOfLayers ; iLayer++ )
+		    {
+			Integer ii = new Integer( iLayer );
+			List<CalorimeterHit> listCalHits = new ArrayList<CalorimeterHit>( mapLayerToSetNoSeedClump3DHits.get( ii ) );			
+			List<Cluster> listClus2D = clus2DFinder.createClusters( listCalHits );
+			mapLayerToListNoSeedClump2D.put( ii , listClus2D );
+		    }
+		/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+		// map from seed 2D to seed 3D //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+		// map from seed mip 2D to seed mip 3D
+		Map<Cluster,Cluster> mapSeedMip2DToSeedMip3D = new HashMap<Cluster,Cluster>();
+		for( Cluster seedMip3D : listSeedMip3D )
+		    {
+			CalorimeterHit negaPol = PFAUtil.getNegativePole( seedMip3D );
+			for( Integer ii : mapLayerToListSeedMip2D.keySet() )
+			    for( Cluster seedMip2D : mapLayerToListSeedMip2D.get( ii ) )
+				if( seedMip2D.getCalorimeterHits().contains( negaPol ) ) mapSeedMip2DToSeedMip3D.put( seedMip2D , seedMip3D );
+		    }
+		// map from seed clump 2D to seed clump 3D
+		Map<Cluster,Cluster> mapSeedClump2DToSeedClump3D = new HashMap<Cluster,Cluster>();
+		for( Cluster seedClump3D : listSeedClump3D )
+		    {
+			CalorimeterHit negaPol = PFAUtil.getNegativePole( seedClump3D );
+			for( Integer ii : mapLayerToListSeedClump2D.keySet() )
+			    for( Cluster seedClump2D : mapLayerToListSeedClump2D.get( ii ) )
+				if( seedClump2D.getCalorimeterHits().contains( negaPol ) ) mapSeedClump2DToSeedClump3D.put( seedClump2D , seedClump3D );
+		    }
+
+
+
+		// list of positive poles ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+		List<Cluster> listCluster2DPositivePole = new ArrayList<Cluster>();
+		for( Cluster seedMip3D : listSeedMip3D )
+		    {
+			CalorimeterHit posiPol = PFAUtil.getPositivePole( seedMip3D );
+			for( Integer ii : mapLayerToListSeedMip2D.keySet() )
+			    for( Cluster seedMip2D : mapLayerToListSeedMip2D.get( ii ) )
+				if( seedMip2D.getCalorimeterHits().contains( posiPol ) ) listCluster2DPositivePole.add( seedMip2D );
+		    }
+		for( Cluster noSeedMip3D : listNoSeedMip3D )
+		    {
+			CalorimeterHit posiPol = PFAUtil.getPositivePole( noSeedMip3D );
+			for( Integer ii : mapLayerToListNoSeedMip2D.keySet() )
+			    for( Cluster noSeedMip2D : mapLayerToListNoSeedMip2D.get( ii ) )
+				if( noSeedMip2D.getCalorimeterHits().contains( posiPol ) ) listCluster2DPositivePole.add( noSeedMip2D );
+		    }
+		for( Cluster noSeedClump3D : listNoSeedClump3D )
+		    {
+			CalorimeterHit posiPol = PFAUtil.getPositivePole( noSeedClump3D );
+			for( Integer ii : mapLayerToListNoSeedClump2D.keySet() )
+			    for( Cluster noSeedClump2D : mapLayerToListNoSeedClump2D.get( ii ) )
+				if( noSeedClump2D.getCalorimeterHits().contains( posiPol ) ) listCluster2DPositivePole.add( noSeedClump2D );
+		    }
+		/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+
+		// list of negative poles ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+		List<Cluster> listCluster2DNegativePole = new ArrayList<Cluster>();
+		for( Cluster seedMip3D : listSeedMip3D )
+		    {
+			CalorimeterHit negaPol = PFAUtil.getNegativePole( seedMip3D );
+			for( Integer ii : mapLayerToListSeedMip2D.keySet() )
+			    for( Cluster seedMip2D : mapLayerToListSeedMip2D.get( ii ) )
+				if( seedMip2D.getCalorimeterHits().contains( negaPol ) ) listCluster2DNegativePole.add( seedMip2D );
+		    }
+		for( Cluster noSeedMip3D : listNoSeedMip3D )
+		    {
+			CalorimeterHit negaPol = PFAUtil.getNegativePole( noSeedMip3D );
+			for( Integer ii : mapLayerToListNoSeedMip2D.keySet() )
+			    for( Cluster noSeedMip2D : mapLayerToListNoSeedMip2D.get( ii ) )
+				if( noSeedMip2D.getCalorimeterHits().contains( negaPol ) ) listCluster2DNegativePole.add( noSeedMip2D );
+		    }
+		for( Cluster noSeedClump3D : listNoSeedClump3D )
+		    {
+			CalorimeterHit negaPol = PFAUtil.getNegativePole( noSeedClump3D );
+			for( Integer ii : mapLayerToListNoSeedClump2D.keySet() )
+			    for( Cluster noSeedClump2D : mapLayerToListNoSeedClump2D.get( ii ) )
+				if( noSeedClump2D.getCalorimeterHits().contains( negaPol ) ) listCluster2DNegativePole.add( noSeedClump2D );
+		    }
+		/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+
+		// map between mip 2D and mip 3D, and between seed clus2D and seed clus3D ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+		Map<Cluster,Cluster> mapSolid2DToSolid3D = new HashMap<Cluster,Cluster>();
+		for( Cluster mip3D : listSeedMip3D ) {
+		    for( Integer ii : mapLayerToListSeedMip2D.keySet() ) {
+			boolean isBreak = false;
+			for( Cluster mip2D : mapLayerToListSeedMip2D.get( ii ) ) {
+			    for( CalorimeterHit hit1 : mip3D.getCalorimeterHits() ) {
+				for( CalorimeterHit hit2 : mip2D.getCalorimeterHits() ) {
+				    if( hit1 == hit2 ) {
+					mapSolid2DToSolid3D.put( mip2D , mip3D );
+					isBreak = true;
+				    }
+				    if( isBreak ) break;
+				}
+				if( isBreak ) break;
+			    }
+			    if( isBreak ) break;
+			}
+		    }
+		}
+		for( Cluster mip3D : listNoSeedMip3D ) {
+		    for( Integer ii : mapLayerToListNoSeedMip2D.keySet() ) {
+			boolean isBreak = false;
+			for( Cluster mip2D : mapLayerToListNoSeedMip2D.get( ii ) ) {
+			    for( CalorimeterHit hit1 : mip3D.getCalorimeterHits() ) {
+				for( CalorimeterHit hit2 : mip2D.getCalorimeterHits() ) {
+				    if( hit1 == hit2 ) {
+					mapSolid2DToSolid3D.put( mip2D , mip3D );
+					isBreak = true;
+				    }
+				    if( isBreak ) break;
+				}
+				if( isBreak ) break;
+			    }
+			    if( isBreak ) break;
+			}
+		    }
+		}
+		for( Cluster clump3D : listSeedClump3D ) {
+		    for( Integer ii : mapLayerToListSeedClump2D.keySet() ) {
+			boolean isBreak = false;
+			for( Cluster clump2D : mapLayerToListSeedClump2D.get( ii ) ) {
+			    for( CalorimeterHit hit1 : clump3D.getCalorimeterHits() ) {
+				for( CalorimeterHit hit2 : clump2D.getCalorimeterHits() ) {
+				    if( hit1 == hit2 ) {
+					mapSolid2DToSolid3D.put( clump2D , clump3D );
+					isBreak = true;
+				    }
+				    if( isBreak ) break;
+				}
+				if( isBreak ) break;
+			    }
+			    if( isBreak ) break;
+			}
+		    }
+		}
+
+		/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+		
+
+
+		///// shower building ///// gammapi
+
+		List<Shower2D> listShower2D = new ArrayList<Shower2D>();
+
+		for( int iLayer = 0 ; iLayer < totalNumberOfLayers ; iLayer++ )
+		    {
+			Integer ii = new Integer( iLayer );
+
+			for( Shower2D shower2D : listShower2D )
+			    for( ShowerBranch2D branch2D : shower2D.getBranches() )
+				branch2D.setIsBlock( false );
+
+
+			if( mapLayerToListSeedMip2D.get( ii ).size() == 0  && mapLayerToListSeedClump2D.get( ii ).size() == 0 &&
+			    mapLayerToListNoSeedMip2D.get( ii ).size() == 0 && mapLayerToListNoSeedClump2D.get( ii ).size() == 0 ) continue;
+
+			// 2D clusters on this layer
+			// seed mip
+			List<Cluster> listSeedMip2DOnThisLayer = new ArrayList<Cluster>();
+			if( mapLayerToListSeedMip2D.get( ii ).size() != 0 )
+			    listSeedMip2DOnThisLayer.addAll( mapLayerToListSeedMip2D.get( ii ) );
+			// seed clump
+			List<Cluster> listSeedClump2DOnThisLayer = new ArrayList<Cluster>();
+			if( mapLayerToListSeedClump2D.get( ii ).size() != 0 )
+			    listSeedClump2DOnThisLayer.addAll( mapLayerToListSeedClump2D.get( ii ) );
+			// no seed mip
+			List<Cluster> listNoSeedMip2DOnThisLayer = new ArrayList<Cluster>();
+			if( mapLayerToListNoSeedMip2D.get( ii ).size() != 0 )
+			    listNoSeedMip2DOnThisLayer.addAll( mapLayerToListNoSeedMip2D.get( ii ) );
+			// no seed clump
+			List<Cluster> listNoSeedClump2DOnThisLayer = new ArrayList<Cluster>();
+			if( mapLayerToListNoSeedClump2D.get( ii ).size() != 0 )
+			    listNoSeedClump2DOnThisLayer.addAll( mapLayerToListNoSeedClump2D.get( ii ) );
+
+
+
+			// joining isolated hits to other clusters //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+			List<Cluster> listNoSeedClump2DOnThisLayerSingleHit = new ArrayList<Cluster>(); 
+			List<Cluster> listNoSeedClump2DOnThisLayerMultiHit = new ArrayList<Cluster>(); 
+
+			for( Cluster clus2D : listNoSeedClump2DOnThisLayer )
+			    {
+				if( clus2D.getCalorimeterHits().size() == 1 ) listNoSeedClump2DOnThisLayerSingleHit.add( clus2D );
+				else listNoSeedClump2DOnThisLayerMultiHit.add( clus2D );
+			    }
+
+			for( Cluster clus2D1 : listNoSeedClump2DOnThisLayerSingleHit )
+			    {
+				Hep3Vector vPos1 = new BasicHep3Vector( clus2D1.getPosition() );
+				double distMin = 1.e+99;
+				BasicCluster clus2Dmin = null;
+				for( Cluster clus2D : listNoSeedClump2DOnThisLayerMultiHit )
+				    for( CalorimeterHit hit : clus2D.getCalorimeterHits() )
+					{
+					    Hep3Vector vPos2 = new BasicHep3Vector( hit.getPosition() );
+					    Hep3Vector vDelta = VecOp.sub( vPos1 , vPos2 );
+					    double mDelta = vDelta.magnitude();
+					    if( mDelta < distMin )
+						{
+						    distMin = mDelta;
+						    clus2Dmin = (BasicCluster)clus2D;
+						}
+					}
+				if( clus2Dmin != null )
+				    if( distMin < 3. * cellSize( ii.intValue() ) ) //  3 is a number to optimize
+					{ 
+					    for( CalorimeterHit hit : clus2D1.getCalorimeterHits() ) clus2Dmin.addHit( hit );
+					    listNoSeedClump2DOnThisLayer.remove( clus2D1 );
+					    mapLayerToListNoSeedClump2D.get( ii ).remove( clus2D1 );
+					}
+			    }
+
+			/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+
+
+			// main building ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+			// use the showers built on previous layers to absorb matching clusters on this layer
+
+			for( Shower2D shower2D : listShower2D )
+			    for( ShowerBranch2D branch2D : shower2D.getBranches() )
+				{
+				    if( branch2D.isPositivePole() ) continue;
+				    Cluster baseClus2D = branch2D.getLastAddedCluster();
+				    
+				    // keep the 3D mips
+				    if( branch2D.isMip() && branch2D.isSeed() )
+					{
+					    List<Cluster> listSeedMip2DOnThisLayerTemp = new ArrayList<Cluster>();
+					    listSeedMip2DOnThisLayerTemp.addAll( listSeedMip2DOnThisLayer );
+					    
+					    for( Cluster clus2D : listSeedMip2DOnThisLayerTemp )
+						if( mapSolid2DToSolid3D.get( clus2D ) == mapSolid2DToSolid3D.get( baseClus2D ) )
+						    {
+							branch2D.addCluster( clus2D );
+							branch2D.setLayerAndCluster( ii , clus2D );
+							listSeedMip2DOnThisLayer.remove( clus2D );
+							branch2D.setIsBlock( true );
+						    }
+					}
+				    else if( branch2D.isMip() && !branch2D.isSeed() )
+					{
+					    List<Cluster> listNoSeedMip2DOnThisLayerTemp = new ArrayList<Cluster>();
+					    listNoSeedMip2DOnThisLayerTemp.addAll( listNoSeedMip2DOnThisLayer );
+					    
+					    for( Cluster clus2D : listNoSeedMip2DOnThisLayerTemp )
+						if( mapSolid2DToSolid3D.get( clus2D ) == mapSolid2DToSolid3D.get( baseClus2D ) )
+						    {
+							branch2D.addCluster( clus2D );
+							branch2D.setLayerAndCluster( ii , clus2D );
+							listNoSeedMip2DOnThisLayer.remove( clus2D );
+							branch2D.setIsBlock( true );
+						    }
+					}
+				    else if( !branch2D.isMip() && branch2D.isSeed() )
+					{
+					    List<Cluster> listSeedClump2DOnThisLayerTemp = new ArrayList<Cluster>();
+					    listSeedClump2DOnThisLayerTemp.addAll( listSeedClump2DOnThisLayer );
+					    
+					    for( Cluster clus2D : listSeedClump2DOnThisLayerTemp )
+						if( mapSolid2DToSolid3D.get( clus2D ) == mapSolid2DToSolid3D.get( baseClus2D ) )
+						    {
+							branch2D.addCluster( clus2D );
+							branch2D.setLayerAndCluster( ii , clus2D );
+							listSeedClump2DOnThisLayer.remove( clus2D );
+							branch2D.setIsBlock( true );
+						    }
+					}
+				}
+			
+			//Collections.sort( listNoSeedClump2DOnThisLayer , new EnergySort() );
+	   
+			// first iteration // gaga
+			
+			Map<Cluster, List<ShowerBranch2D>> mapNoSeedClump2DToShowerBranchCandidates = new HashMap<Cluster, List<ShowerBranch2D>>();
+			Map<ShowerBranch2D, List<Cluster>> mapShowerBranchToNoSeedClump2DCandidates = new HashMap<ShowerBranch2D, List<Cluster>>();
+
+			
+
+			/*
+			// find candidates for associations
+
+			for( Cluster clus2D : listNoSeedClump2DOnThisLayer )
+			    {
+				double proximityCut = 2. * cellSize( ii ); //// To optimize
+
+				for( Shower2D shower2D : listShower2D )
+				    for( ShowerBranch2D branch2D : shower2D.getBranches() )
+					{
+					    if( branch2D.isBlock() ) continue; //toto
+
+					    Hep3Vector vdir = branch2D.getDirection();
+					    Cluster lastAdded = branch2D.getLastAddedCluster();
+					    Hep3Vector vPos = new BasicHep3Vector( lastAdded.getPosition() );
+					    
+					    double proximity = 1.e+99;
+					    for( CalorimeterHit hit : clus2D.getCalorimeterHits() )
+						{
+						    Hep3Vector hitRelPos = VecOp.sub( new BasicHep3Vector( hit.getPosition() ) , vPos );
+						    double dot = VecOp.dot( VecOp.unit(vdir) , hitRelPos );
+						    double dist = Math.sqrt( hitRelPos.magnitude()*hitRelPos.magnitude() - dot*dot );
+						    if( dist < proximity ) proximity = dist;
+						}
+					    
+					    if( proximity < proximityCut )
+						{
+						    List<ShowerBranch2D> showersForClus = mapNoSeedClump2DToShowerBranchCandidates.get( clus2D );
+						    if( showersForClus == null )
+							{
+							    showersForClus = new Vector<ShowerBranch2D>();
+							    mapNoSeedClump2DToShowerBranchCandidates.put( clus2D , showersForClus );
+							}
+						    showersForClus.add( branch2D );
+						    
+						    List<Cluster> clustersForShower = mapShowerBranchToNoSeedClump2DCandidates.get( branch2D );
+						    if( clustersForShower == null )
+							{
+							    clustersForShower = new Vector<Cluster>();
+							    mapShowerBranchToNoSeedClump2DCandidates.put( branch2D , clustersForShower );
+							}
+						    clustersForShower.add( clus2D );
+						}
+					}
+			    }
+
+			// ambiguity solving //gaga
+
+
+			for( Cluster clus2D : listNoSeedMip2DOnThisLayer ) //toto
+			    if( listCluster2DNegativePole.contains( clus2D ) )
+				listNoSeedClump2DOnThisLayer.add( clus2D );
+
[truncated at 1000 lines; 1445 more skipped]

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
DebugTrackSeedMatchingDriver.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- DebugTrackSeedMatchingDriver.java	23 Oct 2011 09:50:30 -0000	1.1
+++ DebugTrackSeedMatchingDriver.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -2,7 +2,7 @@
 
 import java.io.IOException;
 import java.util.*; 
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
 import org.lcsim.util.*;
 import org.lcsim.event.*;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
DebugTrackToClusterSpecialCases.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- DebugTrackToClusterSpecialCases.java	23 Oct 2011 09:50:30 -0000	1.1
+++ DebugTrackToClusterSpecialCases.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -12,7 +12,7 @@
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.geometry.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import java.io.IOException;
 
 
@@ -65,7 +65,7 @@
 	
 	m_LQChecker = new DominantParticleBasedLQChecker( m_debugUtils );
 
-	m_eval = new LikelihoodEvaluatorWrapper("structuralPFA/likelihood.bin");
+	m_eval = new LikelihoodEvaluatorWrapper("structuralPFA/linkLikelihood.bin");
 
 	m_inputTrackListName = inputTrackList;
 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/debug
DebugUtils.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- DebugUtils.java	23 Oct 2011 09:50:30 -0000	1.1
+++ DebugUtils.java	11 Apr 2012 15:49:35 -0000	1.2
@@ -7,7 +7,7 @@
 import org.lcsim.util.hitmap.*;
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.mc.fast.tracking.*;
 import org.lcsim.event.base.*;
 import org.lcsim.recon.tracking.seedtracker.SeedTrack;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
AmbiguousTrackToClusterMapMaker.java added at 1.1
diff -N AmbiguousTrackToClusterMapMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ AmbiguousTrackToClusterMapMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,118 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*; 
+import hep.physics.vec.*;
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
+
+public class AmbiguousTrackToClusterMapMaker extends TrackToClusterMapMaker {
+    protected HelixExtrapolator m_findCluster;
+    public AmbiguousTrackToClusterMapMaker(HelixExtrapolator findCluster, String inputTrackList, String outputMap, String outputUnmatchedTrackList) {
+	super(inputTrackList, outputMap, outputUnmatchedTrackList);
+	m_findCluster = findCluster;
+    }
+
+    protected Map<String,String> m_mapInputNameToMatchedOutputName = new HashMap<String,String>();
+    protected Map<String,String> m_mapInputNameToUnmatchedOutputName = new HashMap<String,String>();
+    public void addInputList(String inputName, String matchedOutputName, String unmatchedOutputName) {
+	m_mapInputNameToMatchedOutputName.put(inputName, matchedOutputName);
+	m_mapInputNameToUnmatchedOutputName.put(inputName, unmatchedOutputName);
+    }
+
+    protected Map<Track,Cluster> makeMap(EventHeader event) {
+	// Read in inputs
+	List<Track> trackList = event.get(Track.class, m_inputTrackListName);
+	Map<String, List<Cluster>> inputLists = new HashMap<String, List<Cluster>>();
+	for (String str : m_mapInputNameToMatchedOutputName.keySet()) {
+	    List<Cluster> currentList = event.get(Cluster.class, str);
+	    inputLists.put(str, currentList);
+	}
+	if (inputLists.size() != m_mapInputNameToMatchedOutputName.size()) { throw new AssertionError("Book-keeping error"); }
+	if (inputLists.size() != m_mapInputNameToUnmatchedOutputName.size()) { throw new AssertionError("Book-keeping error"); }
+
+	// Set up matching
+	LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher(m_findCluster);
+	LocalHelixExtrapolationTrackClusterMatcher genMatch = new LocalHelixExtrapolationTrackClusterMatcher(m_findCluster);
+	DualActionTrackClusterMatcher dualMatch = new DualActionTrackClusterMatcher(mipMatch, genMatch);
+	mipMatch.process(event);
+	genMatch.process(event);
+	List<Cluster> allMatchableClusters = new Vector<Cluster>();
+	for (List<Cluster> inputList : inputLists.values()) {
+	    allMatchableClusters.addAll(inputList);
+	}
+
+	// Do matching
+	Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
+	Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
+	for (Track tr : trackList) {
+	    Cluster matchedCluster = dualMatch.matchTrackToCluster(tr, allMatchableClusters);
+	    if (matchedCluster != null) {
+		// Found a match
+		// Optionally, handle these cases:
+		//   * Match is to a teeny cluster piece (leftoverHitClusters) but there is structure nearby inside same DTree
+		//   * Match is to a photon (try to split up)
+		//   * Match is to a cluster with E>>p (try to split up)
+		// ... but those don't really apply here (they aren't MIPs)
+		tracksMatchedToClusters.put(tr, matchedCluster);
+		List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
+		if (clusTrList == null) { 
+		    clusTrList = new Vector<Track>(); 
+		    clustersMatchedToTracks.put(matchedCluster, clusTrList); 
+		}
+		clusTrList.add(tr);
+	    }
+	}
+
+	// In this case, we don't forbid ambiguous matches where more than one track points to a single cluster.
+	// But we do need to identify them. For now, describe them as a MultipleTrackTrack. We may want to
+	// revisit this later.
+	Map<Track,Cluster> outputMap = new HashMap<Track,Cluster>();
+	Set<Track> matchedTracks = new HashSet<Track>();
+	for (Cluster clus : clustersMatchedToTracks.keySet()) {
+	    if (clus == null) { throw new AssertionError("Null cluster!"); }
+	    List<Track> tracksOfMatchedClus = clustersMatchedToTracks.get(clus);
+	    if (tracksOfMatchedClus == null) { throw new AssertionError("Book-keeping error!"); }
+	    if (tracksOfMatchedClus.size()==0) {
+		throw new AssertionError("Book-keeping error!");
+	    } else if (tracksOfMatchedClus.size()==1) {
+		// Unique match -- OK
+		Track tr = tracksOfMatchedClus.get(0);
+		outputMap.put(tr, clus);
+	    } else {
+		// Ambiguous match -- also OK
+		Track mergedTrack = new MultipleTrackTrack(tracksOfMatchedClus);
+		outputMap.put(mergedTrack, clus);
+	    }
+	    matchedTracks.addAll(tracksOfMatchedClus);
+	}
+	
+	// Identify unmatched tracks
+	List<Track> unmatchedTracks = new Vector<Track>();
+	unmatchedTracks.addAll(trackList);
+	unmatchedTracks.removeAll(matchedTracks);
+
+	// Separate out lists of matched & unmatched clusters
+	for (String str : m_mapInputNameToMatchedOutputName.keySet()) {
+	    List<Cluster> inputList = inputLists.get(str);
+	    List<Cluster> outputListMatched = new Vector<Cluster>();
+	    List<Cluster> outputListUnmatched = new Vector<Cluster>();
+	    String matchedOutputName = m_mapInputNameToMatchedOutputName.get(str);
+	    String unmatchedOutputName = m_mapInputNameToUnmatchedOutputName.get(str);
+	    for (Cluster clus : inputList) {
+		if (outputMap.values().contains(clus)) {
+		    outputListMatched.add(clus);
+		} else {
+		    outputListUnmatched.add(clus);
+		}
+	    }
+	    event.put(matchedOutputName, outputListMatched);
+	    event.put(unmatchedOutputName, outputListUnmatched);
+	}
+
+	// All done
+	event.put(m_outputUnmatchedTrackListName, unmatchedTracks);
+	return outputMap;
+    }
+}
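
For orientation, a minimal sketch of how this map maker might be wired up; the extrapolator choice and every event list name below are illustrative placeholders, not taken from this commit:

    // Hypothetical wiring -- all event list names here are placeholders.
    HelixExtrapolator extrap = new FlexibleHelixExtrapolator();
    AmbiguousTrackToClusterMapMaker mapMaker =
        new AmbiguousTrackToClusterMapMaker(extrap, "Tracks", "TrackToClusterMap", "UnmatchedTracks");
    // Each registered input cluster list is split into matched/unmatched output lists:
    mapMaker.addInputList("Mips", "MatchedMips", "UnmatchedMips");
    mapMaker.addInputList("Clumps", "MatchedClumps", "UnmatchedClumps");

Note that ambiguous matches (several tracks pointing at one cluster) survive in the output map as a single MultipleTrackTrack entry, as handled in makeMap above.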

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
CheatHelixTrackClusterMatcher.java added at 1.1
diff -N CheatHelixTrackClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheatHelixTrackClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,46 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.Track;
+import org.lcsim.event.MCParticle;
+import hep.physics.particle.Particle;
+
+/**
+ * Attempt to match a Track to a Cluster, based on the intercept point
+ * on the ECAL inner surface.
+ *
+ * Works the same as SimpleTrackClusterMatcher, except that helix
+ * extrapolation uses truth information instead of track parameters.
+ *
+ * @version $Id: CheatHelixTrackClusterMatcher.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ */
+
+public class CheatHelixTrackClusterMatcher extends SimpleTrackClusterMatcher
+{
+    /** Simple constructor. */
+    public CheatHelixTrackClusterMatcher() {
+	super();
+    }
+    /**
+     * Simple constructor, setting minimum distance from track
+     * intecept point to cluster.
+     * intercept point to cluster.
+    public CheatHelixTrackClusterMatcher(double cut) {
+	super(cut);
+    }
+
+    protected HelixSwimmer createSwimmer(Track tr) {
+	// Use same method as Steve, taking info from MC truth.
+	Particle particle = null;
+	if (tr instanceof org.lcsim.mc.fast.tracking.ReconTrack) {
+	    particle = ((org.lcsim.mc.fast.tracking.ReconTrack)(tr)).getMCParticle();
+	}
+	if (tr instanceof org.lcsim.event.base.BaseTrackMC) {
+	    particle = ((org.lcsim.event.base.BaseTrackMC)(tr)).getMCParticle();
+	}
+	// ... then feeding it into the swimmer:
+	HelixSwimmer swimmer = new HelixSwimmer(m_fieldStrength[2]);
+	swimmer.setTrack(particle.getMomentum(), particle.getOrigin(), ((int)(particle.getCharge())));
+	return swimmer;
+    }
+}

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
CheatHelixTrackMIPClusterMatcher.java added at 1.1
diff -N CheatHelixTrackMIPClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheatHelixTrackMIPClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,34 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.MCParticle;
+import hep.physics.particle.Particle;
+import org.lcsim.event.Track;
+
+/**
+ * Attempt to match a Track to a Cluster, based on the intercept point
+ * on the ECAL inner surface.
+ *
+ * Works the same as SimpleTrackMIPClusterMatcher, except that helix
+ * extrapolation uses truth information instead of track parameters.
+ *
+ * @version $Id: CheatHelixTrackMIPClusterMatcher.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ */
+
+public class CheatHelixTrackMIPClusterMatcher extends SimpleTrackMIPClusterMatcher
+{
+    protected HelixSwimmer createSwimmer(Track tr) {
+	// Use same method as Steve, taking info from MC truth.
+	Particle particle = null;
+	if (tr instanceof org.lcsim.mc.fast.tracking.ReconTrack) {
+	    particle = ((org.lcsim.mc.fast.tracking.ReconTrack)(tr)).getMCParticle();
+	}
+	if (tr instanceof org.lcsim.event.base.BaseTrackMC) {
+	    particle = ((org.lcsim.event.base.BaseTrackMC)(tr)).getMCParticle();
+	}
+	// ... then feeding it into the swimmer:
+	HelixSwimmer swimmer = new HelixSwimmer(m_fieldStrength[2]);
+	swimmer.setTrack(particle.getMomentum(), particle.getOrigin(), ((int)(particle.getCharge())));
+	return swimmer;
+    }
+}
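
Both cheat matchers above only override createSwimmer(), so they drop in wherever their Simple* counterparts are used. A minimal sketch, assuming SimpleTrackClusterMatcher implements the TrackClusterMatcher interface added in this commit, and with an illustrative (untuned) distance cut:

    // Sketch: truth-seeded matching with the same call pattern as the Simple* matchers.
    // The matcher presumably needs to see the event first so that m_fieldStrength is set,
    // as in CheatTrackClusterMatcher below.
    Cluster matchWithTruth(Track track, List<Cluster> clusters) {
        TrackClusterMatcher matcher = new CheatHelixTrackClusterMatcher(50.0); // placeholder cut value
        return matcher.matchTrackToCluster(track, clusters); // null when nothing passes the cut
    }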

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
CheatTrackClusterMatcher.java added at 1.1
diff -N CheatTrackClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheatTrackClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,396 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.Track;
+import org.lcsim.util.*;
+import org.lcsim.mc.fast.tracking.ReconTrack;
+import org.lcsim.recon.ztracking.cheater.CheatTrack;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.SimCalorimeterHit;
+import hep.physics.particle.Particle;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
+import org.lcsim.geometry.Detector;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Subdetector;
+
+public class CheatTrackClusterMatcher extends Driver implements TrackClusterMatcher
+{
+    String m_mcListName;
+    protected CalorimeterInformation ci;
+    protected Subdetector emb;
+    protected Subdetector eme;
+
+    public CheatTrackClusterMatcher(String mcList) {
+	m_mcListName = mcList;
+    }
+    
+    protected EventHeader m_event;
+    public void process(EventHeader event) {
+	m_event = event;
+	initGeometry(event);
+    }
+
+    // Main interface
+    public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters) 
+    {
+	// Need truth info for this track
+        Particle truth = null;
+        if (tr instanceof ReconTrack) {
+            ReconTrack truthTrack = (ReconTrack) (tr);
+            truth = truthTrack.getMCParticle(); // returns a Particle
+        } else if (tr instanceof CheatTrack) {
+            CheatTrack truthTrack = (CheatTrack) (tr);
+            truth = truthTrack.getMCParticle(); // returns a MCParticle
+        } else {
+            throw new AssertionError("Don't know how to extract cheat information from this track of class "+tr.getClass().getName());
+        }
+	if (truth == null) {
+	    // Track has no truth information
+	    throw new AssertionError("CheatTrackClusterMatcher called for Track with no truth information");
+        }
+	MCParticle mctruth = (MCParticle) (truth);
+
+	if (m_debug) { System.out.println("DEBUG: Attempting to match a track with p="+new BasicHep3Vector(tr.getMomentum()).magnitude()+" (from an MCParticle of type "+mctruth.getType()+" and p="+mctruth.getMomentum().magnitude()+") to a list of "+clusters.size()+" clusters."); }
+
+	// Need truth info for whole event
+	List<MCParticle> mcList = m_event.get(MCParticle.class, m_mcListName);
+
+	// Look up truth parents. If the tracks and MC list use the same truth
+	// list then this should be a 1:1 mapping.
+	List<MCParticle> truthParentsInList = findParentsInList(mctruth, mcList);
+
+	// Check which clusters have contributions from the MC parents:
+	Map<Cluster, List<CalorimeterHit>> clustersWithContributions = new HashMap<Cluster,List<CalorimeterHit>>();
+        for (Cluster clus : clusters) {
+            List<CalorimeterHit> hits = findContributedHits(clus, truthParentsInList, mcList);
+            if (hits.size()>0) {
+                clustersWithContributions.put(clus,hits);
+            }
+        }
+
+	// How many clusters did we match up with?
+	int nMatchedClusters = clustersWithContributions.keySet().size();
+	if (m_debug) { System.out.println("DEBUG: Based on truth information, matched track to "+nMatchedClusters+" clusters."); }
+	if (nMatchedClusters == 0) {
+	    // No match -- for example, if track didn't reach the
+	    // calorimeters.
+	    if (m_debug) { System.out.println("DEBUG: No matched clusters => return NULL."); }
+	    return null;
+	} else if (nMatchedClusters == 1) {
+	    // Unique/exact match
+	    Cluster match = clustersWithContributions.keySet().iterator().next();
+	    if (m_debug) { System.out.println("DEBUG: Unique match => return cluster with "+match.getCalorimeterHits().size()+" hits."); }
+	    return match;
+	} else {
+	    // Non-unique match. Now we need to be cleverer...
+	    Set<Cluster> matchedClusters = clustersWithContributions.keySet();
+	    if (m_debug) { System.out.println("DEBUG: Looking at "+matchedClusters.size()+" matched clusters to see which ones work best..."); }
+	    // Look at innermost ECAL hits.
+	    //  * If there are NO clusters with a hit in either/both
+	    //    of the first two layers, return whichever cluster has
+	    //    the innermost hit (not being too fussy if >1 cluster
+	    //    has the same innermost layer since it's a bad match
+	    //    in any case).
+	    //  * If there is ONE cluster with a hit in either/both
+	    //    of the first two layers, return that cluster.
+	    //  * If there are TWO OR MORE clusters with a hit in either/both
+	    //    of the first two layers, think some more.
+	    Cluster clusterWithInnermostHitLayer = null;
+	    CalorimeterHit globalInnermostHitInECAL = null;
+	    List<Cluster> clustersWithHitsInFirstTwoLayers = new Vector<Cluster>();
+	    for (Cluster clus : matchedClusters) {
+		List<CalorimeterHit> hits = clustersWithContributions.get(clus);
+		CalorimeterHit innermostHitInECAL = findInnermostHitInECAL(hits);
+		if (innermostHitInECAL != null) {
+		    // There is an ECAL innermost hit...
+		    int layer = getVLayer(innermostHitInECAL);
+		    // Check if in first two layers
+		    if (layer < 2) {
+			clustersWithHitsInFirstTwoLayers.add(clus);
+		    }
+		    // Check if is current innermost cluster
+		    if (clusterWithInnermostHitLayer == null || layer < getVLayer(globalInnermostHitInECAL)) {
+			globalInnermostHitInECAL = innermostHitInECAL;
+			clusterWithInnermostHitLayer = clus;
+		    }
+		}
+		if (m_debug) {
+		    String printme = "DEBUG: Scanned a cluster with "+hits.size()+" hits: ";
+		    if (innermostHitInECAL != null) {
+			printme += " Has ECAL hit(s). Innermost hit in layer ";
+			printme += getVLayer(innermostHitInECAL);
+			if (getVLayer(innermostHitInECAL) < 2) {
+			    printme += " which is in first two layers";
+			}
+			if (clusterWithInnermostHitLayer == clus) {
+			    printme += " so this is the innermost cluster (global innermost hit layer="+getVLayer(globalInnermostHitInECAL)+")";
+			}
+		    } else {
+			printme += " No ECAL hits.";
+		    }
+		    System.out.println(printme);
+		}
+	    }
+	    if (clustersWithHitsInFirstTwoLayers.size()==0) {
+		// No clusters with hits in first two layers => return innermost
+		// (This can be null if no clusters had any ECAL hits -- that's OK.)
+		if (m_debug && clusterWithInnermostHitLayer != null) { System.out.println("DEBUG: Didn't find any clusters with hits in first two layers => returning innermost with "+clusterWithInnermostHitLayer.getCalorimeterHits().size()+" hits"); }
+		return clusterWithInnermostHitLayer;
+	    } else if (clustersWithHitsInFirstTwoLayers.size()==1) {
+		// Exactly one sensible match. It should be both the innermost
+		// cluster and the only cluster with hit(s) in first two ECAL layers.
+		if (clustersWithHitsInFirstTwoLayers.get(0) != clusterWithInnermostHitLayer) { throw new AssertionError("BUG"); }
+		if (clusterWithInnermostHitLayer == null) { throw new AssertionError("BUG"); }
+		if (m_debug) { System.out.println("DEBUG: Found only one cluster with a hit in either/both of first two layers => returning that one with "+clusterWithInnermostHitLayer.getCalorimeterHits().size()+" hits"); }
+		return clusterWithInnermostHitLayer;
+	    } else {
+		// Ugly case: >1 sensible match
+		if (m_debug) {
+		    System.out.println("DEBUG: For track with p="+Math.sqrt(tr.getPX()*tr.getPX()+tr.getPY()*tr.getPY()+tr.getPZ()*tr.getPZ())+", there are "+clustersWithHitsInFirstTwoLayers.size()+" matched clusters with hits in first 2 ECAL layers.");
+		    System.out.println("DEBUG: MCParticle is a "+mctruth.getPDGID()+" with momentum "+mctruth.getMomentum().magnitude());
+		    System.out.println("DEBUG: Here are the truthParentsInList:");
+		    for (MCParticle part : truthParentsInList) {
+			System.out.println("DEBUG: A "+part.getPDGID()+" with momentum "+part.getMomentum().magnitude());
+		    }
+		}
+		// Make a HelixSwimmer to propagate the track
+		HelixSwimmer swimmer = new HelixSwimmer(m_fieldStrength[2]);
+		swimmer.setTrack(tr);
+		// Try swimming to the barrel:
+		double  alphaBarrel = swimToBarrel(swimmer);
+		boolean validBarrel = false;
+		// Try swimming to the endcap:
+		double  alphaEndcap = swimToEndcap(swimmer);
+		boolean validEndcap = false;
+		// Find intercept point
+		double alpha = Double.NaN; 
+		if (isValidBarrelIntercept(swimmer, alphaBarrel)) {
+		    alpha = alphaBarrel;
+		    validBarrel = true;
+		} else if (isValidEndcapIntercept(swimmer, alphaEndcap)) {
+		    alpha = alphaEndcap;
+		    validEndcap = true;
+		}
+		Hep3Vector trackPoint = null;
+		if (Double.isNaN(alpha)) {
+		    // Track extrapolation failed
+		    // Not much hope here... just pick the one with the most matched hits.
+		    if (m_debug) { System.out.println("DEBUG: Track extrapolation to ECAL failed."); }
+		    Cluster bestMatch = null;
+		    for (Cluster clus : clustersWithHitsInFirstTwoLayers) {
+			List<CalorimeterHit> hits = clustersWithContributions.get(clus);
+			int nMatchedHits = hits.size();
+			if (bestMatch==null || nMatchedHits > clustersWithContributions.get(bestMatch).size()) {
+			    bestMatch = clus;
+			}
+		    }
+		    if (m_debug) { System.out.println("DEBUG: Returning a cluster with "+clustersWithContributions.get(bestMatch).size()+" / "+bestMatch.getCalorimeterHits().size()+" hits matched as my best guess."); }
+		    return bestMatch;
+		} else {
+		    trackPoint = swimmer.getPointAtDistance(alpha);
+		    if (m_debug) { System.out.println("DEBUG: Extrapolated track; Track intercept point at ("+trackPoint.x()+","+trackPoint.y()+","+trackPoint.z()+")"); }
+		}
+		// Now find the nearest one...
+		Cluster nearestClusterToInterceptPoint = null;
+		CalorimeterHit nearestHitToInterceptPoint = null;
+		for (Cluster clus : clustersWithHitsInFirstTwoLayers) {
+		    List<CalorimeterHit> hits = clustersWithContributions.get(clus);
+		    CalorimeterHit innermostHitInECAL = findInnermostHitInECAL(hits);
+		    if (m_debug) { System.out.println("DEBUG: Cluster with "+hits.size()+" / "+clus.getCalorimeterHits().size()+" hits matched, and first ECAL hit in layer "+getVLayer(innermostHitInECAL)+" of "+innermostHitInECAL.getSubdetector().getName()); }
+		    for (CalorimeterHit hit : hits) {
+			int layer = getVLayer(hit);
+			if (layer==0 || layer==1) {
+			    Subdetector subdet = hit.getSubdetector();
+			    if (subdet == emb || subdet == eme) {
+				// EM -- OK
+				double distance = proximity(trackPoint, hit);
+				if (m_debug) { System.out.println("DEBUG:    Hit in ECAL layer "+layer+" distance from intercept point: "+distance); }
+				if (nearestHitToInterceptPoint==null || distance < proximity(trackPoint, nearestHitToInterceptPoint)) {
+				    if (m_debug) { System.out.println("DEBUG: This is the new nearest hit"); }
+				    nearestHitToInterceptPoint = hit;
+				    nearestClusterToInterceptPoint = clus;
+				}
+			    }
+			}
+		    }
+		}
+		if (nearestClusterToInterceptPoint==null) { throw new AssertionError("Inconsistency"); }
+		if (m_debug) { System.out.println("DEBUG: Nearest cluster has proximity of "+proximity(trackPoint, nearestHitToInterceptPoint)+" and "+nearestClusterToInterceptPoint.getCalorimeterHits().size()+" hits. Returning it."); }
+		return nearestClusterToInterceptPoint;
+	    }
+	}
+
+    }
+
+
+    // Utility routines
+    // ----------------
+
+    protected CalorimeterHit findInnermostHitInECAL(Cluster clus) {
+	return findInnermostHitInECAL(clus.getCalorimeterHits());
+    }
+
+    protected CalorimeterHit findInnermostHitInECAL(List<CalorimeterHit> hits) {
+	CalorimeterHit innermostHit = null;
+	for (CalorimeterHit hit : hits) {
+	    int layer = getVLayer(hit);
+	    Subdetector subdet = hit.getSubdetector();	    
+            if ( ! subdet.isCalorimeter() ) { throw new AssertionError("Cluster hit outside calorimeter"); }
+            if (subdet == emb || subdet == eme) {
+                // EM -- OK
+		if (innermostHit==null || getVLayer(innermostHit)>layer) {
+		    innermostHit = hit;
+		}
+	    }
+	}
+	return innermostHit;
+    }
+
+    protected int getVLayer(CalorimeterHit hit) {
+	if (hit==null) { throw new AssertionError("hit is null"); }
+	org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
+	id.setID(hit.getCellID());
+	int layer = id.getVLayer();
+	return layer;
+    }
+
+    protected List<CalorimeterHit> findContributedHits(Cluster clus, List<MCParticle> relevantParticles, List<MCParticle> allParticles) {
+        Vector<CalorimeterHit> output = new Vector<CalorimeterHit>();
+        for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+            boolean relevantContributionToHit = false;
+            Set<MCParticle> particles = findMCParticles(hit, allParticles);
+            for (MCParticle part : particles) {
+                if (relevantParticles.contains(part)) {
+                    relevantContributionToHit = true;
+                    break;
+                }
+            }
+            if (relevantContributionToHit) {
+                output.add(hit);
+            }
+        }
+        return output;
+    }
+
+    protected Set<MCParticle> findMCParticles(CalorimeterHit hit, List<MCParticle> mcList) {
+        if ( ! (hit instanceof SimCalorimeterHit) ) {
+            throw new AssertionError("Non-simulated hit!");
+        } else {
+            SimCalorimeterHit simHit = (SimCalorimeterHit) (hit);
+            Set<MCParticle> contributingParticlesFromList = new HashSet<MCParticle>();
+            int nContributingParticles = simHit.getMCParticleCount();
+            for (int i=0; i<nContributingParticles; i++) {
+                MCParticle part = simHit.getMCParticle(i);
+                List<MCParticle> parentsInList = findParentsInList(part, mcList);
+                contributingParticlesFromList.addAll(parentsInList);
+            }
+            return contributingParticlesFromList;
+        }
+    }
+
+    protected List<MCParticle> findParentsInList(MCParticle part, List<MCParticle> mcList) {
+        List<MCParticle> outputList = new Vector<MCParticle>();
+        if (mcList.contains(part)) {
+            // Already in there
+            outputList.add(part);
+        } else {
+            // Not in there -- recurse up through parents
+            List<MCParticle> parents = part.getParents();
+            if (parents.size()==0) {
+                // Ran out of options -- add nothing and return below
+            } else {
+                for (MCParticle parent : parents) {
+                    List<MCParticle> ancestorsInList = findParentsInList(parent, mcList);
+                    outputList.addAll(ancestorsInList);
+                }
+            }
+        }
+        return outputList;
+    }   
+
+    protected double proximity(Hep3Vector point, CalorimeterHit hit) {
+	Hep3Vector hitPosition = new BasicHep3Vector(hit.getPosition());
+	double distance = VecOp.sub(hitPosition, point).magnitude();
+	return distance;
+    }
+
+    protected double swimToBarrel(HelixSwimmer swimmer) {
+	// Look for a hit in the first layer of the ECAL barrel
+	return swimmer.getDistanceToRadius(m_ECAL_barrel_r);
+    }
+    protected double swimToEndcap(HelixSwimmer swimmer) {
+	// Look for a hit in the first layer of the ECAL endcap
+	double distanceToEndcap1 = swimmer.getDistanceToZ(m_ECAL_endcap_z);
+	double distanceToEndcap2 = swimmer.getDistanceToZ(-m_ECAL_endcap_z);
+	if (distanceToEndcap1>0) {
+	    return distanceToEndcap1;
+	} else {
+	    return distanceToEndcap2;
+	}
+    }
+    protected boolean isValidBarrelIntercept(HelixSwimmer swimmer, double alpha) {
+	// Must have -m_ECAL_barrel_z <= z <= +m_ECAL_barrel_z (within errors)
+	double uncertainty = 0.0;
+	Hep3Vector intercept = swimmer.getPointAtDistance(alpha);
+	double z = intercept.z();
+	boolean zInRange = (z >= m_ECAL_barrel_zmin-uncertainty && z <= m_ECAL_barrel_zmax+uncertainty);
+	return zInRange;
+    }
+    protected boolean isValidEndcapIntercept(HelixSwimmer swimmer, double alpha) {
+	// Must have m_ECAL_endcap_rmin <= r <= m_ECAL_endcap_rmax (within errors)
+	double uncertainty = 0.0;
+	Hep3Vector intercept = swimmer.getPointAtDistance(alpha);
+	double r = Math.sqrt(intercept.x()*intercept.x() + intercept.y()*intercept.y());
+	boolean rInRange = (r >= m_ECAL_endcap_rmin-uncertainty && r <= m_ECAL_endcap_rmax+uncertainty);
+	return rInRange;
+    }
+
+    public void initGeometry(EventHeader event) 
+    {
+        if(!m_init)
+        {
+            if(ci == null)
+            {
+                ci = CalorimeterInformation.instance();
+                emb = ci.getSubdetector(CalorimeterType.EM_BARREL);
+                eme = ci.getSubdetector(CalorimeterType.EM_ENDCAP);
+            }
+            m_ECAL_barrel_zmin = ci.getZMin(CalorimeterType.EM_BARREL);
+            m_ECAL_barrel_zmax = ci.getZMax(CalorimeterType.EM_BARREL);
+            m_ECAL_barrel_r = emb.getLayering().getDistanceToLayerSensorMid(0);
+            m_ECAL_endcap_z = eme.getLayering().getDistanceToLayerSensorMid(0);
+            m_ECAL_endcap_rmin = ci.getRMin(CalorimeterType.EM_ENDCAP);
+            m_ECAL_endcap_rmax = ci.getRMax(CalorimeterType.EM_ENDCAP);
+            double[] zero = {0, 0, 0};
+            m_fieldStrength = event.getDetector().getFieldMap().getField(zero);
+            m_init = true;
+            if (m_debug) {
+                System.out.println(this.getClass().getName()+": Init: ECAL barrel zmin="+m_ECAL_barrel_zmin);
+                System.out.println(this.getClass().getName()+": Init: ECAL barrel zmax="+m_ECAL_barrel_zmax);
+                System.out.println(this.getClass().getName()+": Init: ECAL barrel r="+m_ECAL_barrel_r);
+                System.out.println(this.getClass().getName()+": Init: ECAL endcap z="+m_ECAL_endcap_z);
+                System.out.println(this.getClass().getName()+": Init: ECAL endcap rmin="+m_ECAL_endcap_rmin);
+                System.out.println(this.getClass().getName()+": Init: ECAL endcap rmax="+m_ECAL_endcap_rmax);
+            }
+        }
+    }
+
+    public void setDebug(boolean debug) { m_debug = debug; }
+
+    protected boolean m_init = false;
+    protected double m_ECAL_barrel_zmin;
+    protected double m_ECAL_barrel_zmax;
+    protected double m_ECAL_barrel_r;
+    protected double m_ECAL_endcap_z;
+    protected double m_ECAL_endcap_rmin;
+    protected double m_ECAL_endcap_rmax;
+    protected boolean m_debug = false;
+    protected double[] m_fieldStrength;
+}
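
A short usage sketch for the cheat matcher above; the MC list name and variable names are placeholders:

    // Sketch: truth-based matching from inside a Driver's process(EventHeader).
    CheatTrackClusterMatcher cheat = new CheatTrackClusterMatcher("MCParticle"); // placeholder list name
    cheat.setDebug(false);
    cheat.process(event);  // caches the event and initializes the ECAL geometry
    Cluster best = cheat.matchTrackToCluster(track, candidateClusters); // null if no cluster shares truth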

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
DualActionTrackClusterMatcher.java added at 1.1
diff -N DualActionTrackClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ DualActionTrackClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,55 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import org.lcsim.event.*;
+import org.lcsim.recon.cluster.mipfinder.*;
+
+/** 
+  * A utility class for matching tracks to clusters.
+  * 
+  * This class doesn't do the clustering itself -- instead, it takes two other
+  * TrackClusterMatchers and delegates to them. It first tries to match the
+  * track to a MIP cluster using the first TrackClusterMatcher; if that fails,
+  * it will try to match the track to any cluster using the second
+  * TrackClusterMatcher.
+  *
+  * For now, MIPs are identified by checking whether the Cluster is an instance
+  * of org.lcsim.recon.cluster.mipfinder.MIPCluster
+  *
+  * @author Mat Charles
+  * @version $Id: DualActionTrackClusterMatcher.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+  */
+
+public class DualActionTrackClusterMatcher implements TrackClusterMatcher {
+
+    TrackClusterMatcher m_mipTrackClusterMatcher;
+    TrackClusterMatcher m_genericTrackClusterMatcher;
+
+    /** Constructor: Supply the two TrackClusterMatchers that the matching is delegated to. */
+    public DualActionTrackClusterMatcher(TrackClusterMatcher mipTrackClusterMatcher, TrackClusterMatcher genericTrackClusterMatcher) {
+	m_mipTrackClusterMatcher = mipTrackClusterMatcher;
+	m_genericTrackClusterMatcher = genericTrackClusterMatcher;
+    }
+
+    /** Main interface. */
+    public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters) {
+	// First try the MIPs...
+	List<Cluster> mipClusters = new Vector<Cluster>();
+	for (Cluster clus : clusters) {
+            // This is a bit of an ugly cross-check... would be better to be given a list
+            // of clusters, or to pull one down from the event by name.
+	    if (clus instanceof MIPCluster) {
+		mipClusters.add(clus);
+	    }
+	}
+	
+	Cluster matchedMIP = m_mipTrackClusterMatcher.matchTrackToCluster(tr, mipClusters);
+	if (matchedMIP != null) {
+	    return matchedMIP;
+	}
+	 
+	// That didn't work -- now try generic match
+	Cluster matchedClus = m_genericTrackClusterMatcher.matchTrackToCluster(tr, clusters);
+	return matchedClus;
+    }     
+}
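
The delegation described in the class comment is exactly how AmbiguousTrackToClusterMapMaker (above) uses it; a condensed sketch, with the extrapolator construction as an illustrative choice:

    // MIP-first matching: try the MIP matcher, then fall back to the generic one.
    HelixExtrapolator extrapolator = new LocalHelixExtrapolator(); // illustrative choice
    TrackClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher(extrapolator);
    TrackClusterMatcher genMatch = new LocalHelixExtrapolationTrackClusterMatcher(extrapolator);
    TrackClusterMatcher dual = new DualActionTrackClusterMatcher(mipMatch, genMatch);
    Cluster match = dual.matchTrackToCluster(track, allClusters);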

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
FlexibleHelixExtrapolator.java added at 1.1
diff -N FlexibleHelixExtrapolator.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ FlexibleHelixExtrapolator.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,81 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import hep.physics.vec.*;
+import java.util.*;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.Track;
+import org.lcsim.event.base.BaseTrackMC;
+
+public class FlexibleHelixExtrapolator extends HelixExtrapolator
+{
+    private LocalHelixExtrapolator m_LocalHelixExtrapolator = null;
+    private TrackHelixExtrapolator m_TrackHelixExtrapolator = null;
+    private HelixExtrapolator m_currentLiveExtrapolator = null;
+
+    public FlexibleHelixExtrapolator() {
+	super();
+	m_LocalHelixExtrapolator = new LocalHelixExtrapolator();
+	m_TrackHelixExtrapolator = new TrackHelixExtrapolator();
+    }
+
+    // Main interface
+
+    // This is where the track is specified -- decide at this point which
+    // kind of extrapolator to use.
+    public HelixExtrapolationResult performExtrapolation(Track tr) {
+	if (tr == null) {
+	    // Null track -- blank everything and return failure
+	    m_currentLiveExtrapolator = null;
+	    return null;
+	} else if (tr instanceof BaseTrackMC || tr instanceof org.lcsim.mc.fast.tracking.ReconTrack) {
+	    m_currentLiveExtrapolator = m_LocalHelixExtrapolator;
+	} else {
+	    m_currentLiveExtrapolator = m_TrackHelixExtrapolator;
+	}
+	return m_currentLiveExtrapolator.performExtrapolation(tr);
+    }
+
+    // Main interface routines
+    protected Hep3Vector getInterceptPoint() {
+	return m_currentLiveExtrapolator.getInterceptPoint();
+    }
+    protected Hep3Vector getTangent() {
+	return m_currentLiveExtrapolator.getTangent();
+    }
+    protected Hep3Vector getTangent(Hep3Vector v) {
+	return m_currentLiveExtrapolator.getTangent(v);
+    }
+    protected Hep3Vector extendToEndcapLayer(int layer, Vector<Double> endcap_layering_z, double endcap_rmin, double endcap_rmax, int nsides ) {
+	return m_currentLiveExtrapolator.extendToEndcapLayer(layer, endcap_layering_z,endcap_rmin, endcap_rmax, nsides);
+    }
+    protected Hep3Vector extendToBarrelLayer(int layer, Vector<Double> barrel_layering_r, double barrel_zmin, double barrel_zmax, int nsides ) {
+	return m_currentLiveExtrapolator.extendToBarrelLayer(layer, barrel_layering_r, barrel_zmin, barrel_zmax, nsides);
+    }
+
+    // Other things that need to be propagated to the extrapolator:
+    public void useFCAL(boolean use) {
+	super.useFCAL(use);
+	m_LocalHelixExtrapolator.useFCAL(use);
+	m_TrackHelixExtrapolator.useFCAL(use);
+    }
+
+    public void process(EventHeader event) {
+	super.process(event);
+	m_LocalHelixExtrapolator.process(event);
+	m_TrackHelixExtrapolator.process(event);
+    }
+
+    public void initGeometry(EventHeader event) {
+	super.initGeometry(event);
+	m_LocalHelixExtrapolator.initGeometry(event);
+	m_TrackHelixExtrapolator.initGeometry(event);
+    }
+
+    public void setCutSeparation(double cutSeparation) {
+	super.setCutSeparation(cutSeparation);
+	m_LocalHelixExtrapolator.setCutSeparation(cutSeparation);
+	m_TrackHelixExtrapolator.setCutSeparation(cutSeparation);
+    }
+
+}
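
FlexibleHelixExtrapolator is pure dispatch: fast-MC tracks (BaseTrackMC or
ReconTrack) are routed to the truth-hit-based LocalHelixExtrapolator, and
everything else to TrackHelixExtrapolator. A sketch of the intended call
sequence, inferred from the process()/performExtrapolation() pairing above
(variable names illustrative):

    HelixExtrapolator extrap = new FlexibleHelixExtrapolator();
    extrap.process(event); // caches geometry and forwards to both delegates
    HelixExtrapolationResult res = extrap.performExtrapolation(track);
    if (res != null) {
        Hep3Vector ecalEntry = res.getInterceptPoint(); // ECAL entry point
    }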

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
HelixExtrapolationResult.java added at 1.1
diff -N HelixExtrapolationResult.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ HelixExtrapolationResult.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,77 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import hep.physics.vec.*;
+import java.util.*;
+import org.lcsim.util.swim.HelixSwimmer;
+
+public class HelixExtrapolationResult
+{
+    HelixExtrapolator m_internalExtrapolator;
+    protected HelixExtrapolationResult(HelixExtrapolator extrap) {
+	m_internalExtrapolator = extrap;
+    }
+
+    // Interface
+    public Hep3Vector getInterceptPoint() {
+	return m_internalExtrapolator.getInterceptPoint();
+    }
+    public Hep3Vector getTangent() {
+	return m_internalExtrapolator.getTangent();
+    }
+    public Hep3Vector getTangent(Hep3Vector v) {
+	return m_internalExtrapolator.getTangent(v);
+    }
+    public Hep3Vector extendToEndcapLayer(int layer, Vector<Double> endcap_layering_z, double endcap_rmin, double endcap_rmax, int nsides ) {
+	return m_internalExtrapolator.extendToEndcapLayer(layer, endcap_layering_z, endcap_rmin, endcap_rmax, nsides);
+    }
+    public Hep3Vector extendToBarrelLayer(int layer, Vector<Double> barrel_layering_r, double barrel_zmin, double barrel_zmax, int nsides ) {
+	return m_internalExtrapolator.extendToBarrelLayer(layer, barrel_layering_r, barrel_zmin, barrel_zmax, nsides );
+    }
+    public Long extendToECALLayerAndFindCell(int layer) {
+	return m_internalExtrapolator.extendToECALLayerAndFindCell(layer);
+    }
+    public Long extendToHCALLayerAndFindCell(int layer) {
+	return m_internalExtrapolator.extendToHCALLayerAndFindCell(layer);
+    }
+    public Long extendToMCALLayerAndFindCell(int layer) {
+	return m_internalExtrapolator.extendToMCALLayerAndFindCell(layer);
+    }
+    public Hep3Vector extendToECALLayer(int layer) {
+	return m_internalExtrapolator.extendToECALLayer(layer);
+    }
+    public Hep3Vector extendToHCALLayer(int layer) {
+	return m_internalExtrapolator.extendToHCALLayer(layer);
+    }
+    public Hep3Vector extendToMCALLayer(int layer) {
+	return m_internalExtrapolator.extendToMCALLayer(layer);
+    }
+    public Hep3Vector extendToECALBarrelLayer(int layer) {
+	return m_internalExtrapolator.extendToECALBarrelLayer(layer);
+    }
+    public Hep3Vector extendToECALEndcapLayer(int layer) {
+	return m_internalExtrapolator.extendToECALEndcapLayer(layer);
+    }
+    public Hep3Vector extendToHCALBarrelLayer(int layer) {
+	return m_internalExtrapolator.extendToHCALBarrelLayer(layer);
+    }
+    public Hep3Vector extendToHCALEndcapLayer(int layer) {
+	return m_internalExtrapolator.extendToHCALEndcapLayer(layer);
+    }
+    public Hep3Vector extendToMCALBarrelLayer(int layer) {
+	return m_internalExtrapolator.extendToMCALBarrelLayer(layer);
+    }
+    public Hep3Vector extendToMCALEndcapLayer(int layer) {
+	return m_internalExtrapolator.extendToMCALEndcapLayer(layer);
+    }
+    public HelixSwimmer getSwimmer()
+    {
+        if(m_internalExtrapolator instanceof TrackHelixExtrapolator)
+            return ((TrackHelixExtrapolator) (m_internalExtrapolator)).m_swimmer;
+        return null;
+    }
+    public void setSwimmer(HelixSwimmer hs)
+    {
+        if(m_internalExtrapolator instanceof TrackHelixExtrapolator)
+           ((TrackHelixExtrapolator) (m_internalExtrapolator)).m_swimmer = hs;
+    }
+}
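
HelixExtrapolationResult is a thin handle over a snapshot of the extrapolator,
so layer-by-layer queries can be made after the extrapolation. A hypothetical
walk over the first few ECAL layers (both calls return null where no solution
or cell exists):

    HelixExtrapolationResult result = extrapolator.performExtrapolation(track);
    if (result != null) {
        for (int layer = 0; layer < 5; layer++) {
            Hep3Vector point = result.extendToECALLayer(layer);
            Long cellID = result.extendToECALLayerAndFindCell(layer);
        }
    }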

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
HelixExtrapolator.java added at 1.1
diff -N HelixExtrapolator.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ HelixExtrapolator.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,362 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.geometry.Detector;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.Track;
+import org.lcsim.util.Driver;
+import org.lcsim.geometry.IDDecoder;
+import org.lcsim.geometry.compact.Subdetector;
+import org.lcsim.geometry.subdetector.*;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+
+abstract public class HelixExtrapolator extends Driver
+{
+
+    protected boolean m_init = false;
+    protected double m_ECAL_barrel_zmin;
+    protected double m_HCAL_barrel_zmin;
+    protected double m_MCAL_barrel_zmin;
+    protected double m_ECAL_barrel_zmax;
+    protected double m_HCAL_barrel_zmax;
+    protected double m_MCAL_barrel_zmax;
+    protected double m_ECAL_barrel_r;
+    protected double m_HCAL_barrel_r;
+    protected double m_MCAL_barrel_r;
+    protected double m_ECAL_endcap_z;
+    protected double m_FCAL_endcap_z;
+    protected double m_HCAL_endcap_z;
+    protected double m_MCAL_endcap_z;
+    protected int m_ECAL_barrel_nsides;
+    protected int m_HCAL_barrel_nsides;
+    protected int m_MCAL_barrel_nsides;
+    protected int m_ECAL_endcap_nsides;
+    protected int m_FCAL_endcap_nsides;
+    protected int m_HCAL_endcap_nsides;
+    protected int m_MCAL_endcap_nsides;
+    protected Vector<Double> m_ECAL_barrel_layering_r;
+    protected Vector<Double> m_HCAL_barrel_layering_r;
+    protected Vector<Double> m_MCAL_barrel_layering_r;
+    protected Vector<Double> m_ECAL_endcap_layering_z;
+    protected Vector<Double> m_FCAL_endcap_layering_z;
+    protected Vector<Double> m_HCAL_endcap_layering_z;
+    protected Vector<Double> m_MCAL_endcap_layering_z;
+    protected double m_ECAL_endcap_rmin;
+    protected double m_FCAL_endcap_rmin;
+    protected double m_HCAL_endcap_rmin;
+    protected double m_MCAL_endcap_rmin;
+    protected double m_ECAL_endcap_rmax;
+    protected double m_FCAL_endcap_rmax;
+    protected double m_HCAL_endcap_rmax;
+    protected double m_MCAL_endcap_rmax;
+    protected double[] m_fieldStrength;
+
+    protected double m_cutSeparation = 30.0; // 30 mm = 3 cm
+    public void setCutSeparation(double cutSeparation) { m_cutSeparation = cutSeparation; }
+
+    boolean m_barrelValid = false;
+    boolean m_endcapValid = false;
+
+    EventHeader m_event = null;
+    Track m_track = null;
+
+    boolean m_useFCAL = false;
+    boolean m_interceptsFCAL = false;
+    boolean m_debug = false;
+    CalorimeterInformation ci;
+
+    public HelixExtrapolator() {
+	super();
+    }
+
+    protected HelixExtrapolator(HelixExtrapolator old) {
+	super();
+	m_init = false;
+	if (old.m_init && old.m_event != null) {
+	    initGeometry(old.m_event);
+	}
+	
+	setCutSeparation(old.m_cutSeparation);
+	m_barrelValid = old.m_barrelValid;
+	m_endcapValid = old.m_endcapValid;
+	m_event = old.m_event;
+	m_track = old.m_track;
+	useFCAL(old.m_useFCAL);
+	m_interceptsFCAL = old.m_interceptsFCAL;
+	m_debug = old.m_debug;
+    }
+
+    public void setDebug(boolean debug) {
+	m_debug = debug;
+    }
+
+    /** Allow use of FCAL. */
+    public void useFCAL(boolean use) {
+	m_useFCAL = use;
+    }
+
+    /** Process this event, storing geometry info if needed. */
+    public void process(EventHeader event) {
+	m_event = event;
+	initGeometry(event);
+    }
+
+    /** Initialize geometry. This is done automatically if the class is run as a driver. */
+    public void initGeometry(EventHeader event) {
+        if(ci == null)
+        {
+        ci = CalorimeterInformation.instance();
+	m_ECAL_barrel_zmin = ci.getZMin(CalorimeterType.EM_BARREL);
+	m_HCAL_barrel_zmin = ci.getZMin(CalorimeterType.HAD_BARREL);
+	m_MCAL_barrel_zmin = ci.getZMin(CalorimeterType.MUON_BARREL);
+	m_ECAL_barrel_zmax = ci.getZMax(CalorimeterType.EM_BARREL);
+	m_HCAL_barrel_zmax = ci.getZMax(CalorimeterType.HAD_BARREL);
+	m_MCAL_barrel_zmax = ci.getZMax(CalorimeterType.MUON_BARREL);
+	m_ECAL_barrel_nsides = ci.getNSides(CalorimeterType.EM_BARREL);
+	m_HCAL_barrel_nsides = ci.getNSides(CalorimeterType.HAD_BARREL);
+	m_MCAL_barrel_nsides = ci.getNSides(CalorimeterType.MUON_BARREL);
+	m_ECAL_endcap_nsides = ci.getNSides(CalorimeterType.EM_ENDCAP);
+	m_HCAL_endcap_nsides = ci.getNSides(CalorimeterType.HAD_ENDCAP);
+	m_MCAL_endcap_nsides = ci.getNSides(CalorimeterType.MUON_ENDCAP);
+	m_FCAL_endcap_nsides = ci.getNSides(CalorimeterType.LUMI);
+        Subdetector s = ci.getSubdetector(CalorimeterType.EM_BARREL);
+        double ebr0 = ci.getRMin(CalorimeterType.EM_BARREL);
+        if(s instanceof CylindricalBarrelCalorimeter)
+        {
+            ebr0 = 0.;
+        }
+        m_ECAL_barrel_r = ebr0 + s.getLayering().getDistanceToLayerSensorMid(0);
+        s = ci.getSubdetector(CalorimeterType.HAD_BARREL);
+        double hbr0 = ci.getRMin(CalorimeterType.HAD_BARREL);
+        if(s instanceof CylindricalBarrelCalorimeter)
+        {
+            hbr0 = 0.;
+        }
+        m_HCAL_barrel_r = hbr0 + s.getLayering().getDistanceToLayerSensorMid(0);
+        s = ci.getSubdetector(CalorimeterType.MUON_BARREL);
+        double mbr0 = ci.getRMin(CalorimeterType.MUON_BARREL);
+        if(s instanceof CylindricalBarrelCalorimeter)
+        {
+            mbr0 = 0.;
+        }
+        m_MCAL_barrel_r = mbr0 + s.getLayering().getDistanceToLayerSensorMid(0);
+
+        s = ci.getSubdetector(CalorimeterType.EM_ENDCAP);
+        m_ECAL_endcap_z = s.getLayering().getDistanceToLayerSensorMid(0);
+        s = ci.getSubdetector(CalorimeterType.HAD_ENDCAP);
+        m_HCAL_endcap_z = s.getLayering().getDistanceToLayerSensorMid(0);
+        s = ci.getSubdetector(CalorimeterType.MUON_ENDCAP);
+        m_MCAL_endcap_z = s.getLayering().getDistanceToLayerSensorMid(0);
+        s = ci.getSubdetector(CalorimeterType.LUMI);
+        m_FCAL_endcap_z = s.getLayering().getDistanceToLayerSensorMid(0);
+	m_ECAL_endcap_rmin = ci.getRMin(CalorimeterType.EM_ENDCAP);
+	m_HCAL_endcap_rmin = ci.getRMin(CalorimeterType.HAD_ENDCAP);
+	m_MCAL_endcap_rmin = ci.getRMin(CalorimeterType.MUON_ENDCAP);
+	m_FCAL_endcap_rmin = ci.getRMin(CalorimeterType.LUMI);
+	m_ECAL_endcap_rmax = ci.getRMax(CalorimeterType.EM_ENDCAP);
+	m_HCAL_endcap_rmax = ci.getRMax(CalorimeterType.HAD_ENDCAP);
+	m_MCAL_endcap_rmax = ci.getRMax(CalorimeterType.MUON_ENDCAP);
+	m_FCAL_endcap_rmax = ci.getRMax(CalorimeterType.LUMI);
+	Detector det = event.getDetector();
+	double[] zero = {0, 0, 0};
+	m_fieldStrength = det.getFieldMap().getField(zero);
+	m_ECAL_barrel_layering_r = new Vector<Double>();
+	for (int iLayer=0; iLayer<ci.getNLayers(CalorimeterType.EM_BARREL); iLayer++) {
+	    double r = ebr0 + ci.getSubdetector(CalorimeterType.EM_BARREL).getLayering().getDistanceToLayerSensorMid(iLayer);
+	    m_ECAL_barrel_layering_r.add(new Double(r));
+	}
+	m_ECAL_endcap_layering_z = new Vector<Double>();
+	for (int iLayer=0; iLayer<ci.getNLayers(CalorimeterType.EM_ENDCAP); iLayer++) {
+	    double z = ci.getSubdetector(CalorimeterType.EM_ENDCAP).getLayering().getDistanceToLayerSensorMid(iLayer);
+	    m_ECAL_endcap_layering_z.add(new Double(z));
+	}
+	m_HCAL_barrel_layering_r = new Vector<Double>();
+	for (int iLayer=0; iLayer<ci.getNLayers(CalorimeterType.HAD_BARREL); iLayer++) {
+	    double r = hbr0 + ci.getSubdetector(CalorimeterType.HAD_BARREL).getLayering().getDistanceToLayerSensorMid(iLayer);
+	    m_HCAL_barrel_layering_r.add(new Double(r));
+	}
+	m_HCAL_endcap_layering_z = new Vector<Double>();
+	for (int iLayer=0; iLayer<ci.getNLayers(CalorimeterType.HAD_ENDCAP); iLayer++) {
+	    double z = ci.getSubdetector(CalorimeterType.HAD_ENDCAP).getLayering().getDistanceToLayerSensorMid(iLayer);
+	    m_HCAL_endcap_layering_z.add(new Double(z));
+	}
+	m_MCAL_barrel_layering_r = new Vector<Double>();
+	for (int iLayer=0; iLayer<ci.getNLayers(CalorimeterType.MUON_BARREL); iLayer++) {
+	    double r = mbr0 + ci.getSubdetector(CalorimeterType.MUON_BARREL).getLayering().getDistanceToLayerSensorMid(iLayer);
+	    m_MCAL_barrel_layering_r.add(new Double(r));
+	}
+	m_MCAL_endcap_layering_z = new Vector<Double>();
+	for (int iLayer=0; iLayer<ci.getNLayers(CalorimeterType.MUON_ENDCAP); iLayer++) {
+	    double z = ci.getSubdetector(CalorimeterType.MUON_ENDCAP).getLayering().getDistanceToLayerSensorMid(iLayer);
+	    m_MCAL_endcap_layering_z.add(new Double(z));
+	}
+	m_FCAL_endcap_layering_z = new Vector<Double>();
+	for (int iLayer=0; iLayer<ci.getNLayers(CalorimeterType.LUMI); iLayer++) {
+	    double z = ci.getSubdetector(CalorimeterType.LUMI).getLayering().getDistanceToLayerSensorMid(iLayer);
+	    m_FCAL_endcap_layering_z.add(new Double(z));
+	}
+	m_init = true;
+        }
+    }
+ 
+    // Interface
+    abstract public    HelixExtrapolationResult performExtrapolation(Track tr);
+    abstract protected Hep3Vector getInterceptPoint();
+    abstract protected Hep3Vector getTangent();
+    abstract protected Hep3Vector getTangent(Hep3Vector v);
+    abstract protected Hep3Vector extendToEndcapLayer(int layer, Vector<Double> endcap_layering_z, double endcap_rmin, double endcap_rmax, int nsides );
+    abstract protected Hep3Vector extendToBarrelLayer(int layer, Vector<Double> barrel_layering_r, double barrel_zmin, double barrel_zmax, int nsides );
+
+    // Things that only use the interface
+    
+    protected Long extendToECALLayerAndFindCell(int layer) {
+	Hep3Vector point = extendToECALLayer(layer);
+	IDDecoder id = null;
+	if (m_barrelValid) {
+	    id = ci.getIDDecoder(CalorimeterType.EM_BARREL);
+	    if (id == null) { throw new AssertionError("Failed to find barrel ID"); }
+	} else if (m_endcapValid) {
+	    id = ci.getIDDecoder(CalorimeterType.EM_ENDCAP);
+	    if (id == null) { throw new AssertionError("Failed to find endcap ID"); }
+	}
+	if (id != null && point != null) {
+	    if (Double.isNaN(point.x()) || Double.isNaN(point.y()) || Double.isNaN(point.z())) { throw new AssertionError("ERROR: Asked to look up cell with invalid co-ordinates."); }
+            long cell = 0;
+            try
+            {
+                cell = id.findCellContainingXYZ(point); // This gets stuck for point = (NaN, NaN, NaN)
+            }
+            catch(Exception e)
+            {
+                return null;
+            }
+	    id.setID(cell);
+	    return new Long(cell);
+	} else {
+	    return null;
+	}
+    }
+
+    protected Long extendToHCALLayerAndFindCell(int layer) {
+	Hep3Vector point = extendToHCALLayer(layer);
+	IDDecoder id = null;
+	if (m_barrelValid) {
+	    id = ci.getIDDecoder(CalorimeterType.HAD_BARREL);
+	    if (id == null) { throw new AssertionError("Failed to find barrel ID"); }
+	} else if (m_endcapValid) {
+	    id = ci.getIDDecoder(CalorimeterType.HAD_ENDCAP);
+	    if (id == null) { throw new AssertionError("Failed to find endcap ID"); }
+	}
+	if (id != null && point != null) {
+            long cell = 0;
+            try
+            {
+                cell = id.findCellContainingXYZ(point); // This gets stuck for point = (NaN, NaN, NaN)
+            }
+            catch(Exception e)
+            {
+                return null;
+            }
+	    id.setID(cell);
+	    return new Long(cell);
+	} else {
+	    return null;
+	}
+    }
+
+    protected Long extendToMCALLayerAndFindCell(int layer) {
+	Hep3Vector point = extendToMCALLayer(layer);
+	IDDecoder id = null;
+	if (m_barrelValid) {
+	    id = ci.getIDDecoder(CalorimeterType.MUON_BARREL);
+	    if (id == null) { throw new AssertionError("Failed to find barrel ID"); }
+	} else if (m_endcapValid) {
+	    id = ci.getIDDecoder(CalorimeterType.MUON_ENDCAP);
+	    if (id == null) { throw new AssertionError("Failed to find endcap ID"); }
+	}
+	if (id != null && point != null) {
+            long cell = 0;
+            try
+            {
+                cell = id.findCellContainingXYZ(point); // This gets stuck for point = (NaN, NaN, NaN)
+            }
+            catch(Exception e)
+            {
+                return null;
+            }
+	    id.setID(cell);
+	    return new Long(cell);
+	} else {
+	    return null;
+	}
+    }
+
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToECALLayer(int layer) {
+	if (m_barrelValid) {
+	    return extendToECALBarrelLayer(layer);
+	} else if (m_endcapValid) {
+	    return extendToECALEndcapLayer(layer);
+	} else {
+	    // No solution
+	    return null;
+	}
+    }
+
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToHCALLayer(int layer) {
+	if (m_barrelValid) {
+	    return extendToHCALBarrelLayer(layer);
+	} else if (m_endcapValid) {
+	    return extendToHCALEndcapLayer(layer);
+	} else {
+	    // No solution
+	    return null;
+	}
+    }
+
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToMCALLayer(int layer) {
+	if (m_barrelValid) {
+	    return extendToMCALBarrelLayer(layer);
+	} else if (m_endcapValid) {
+	    return extendToMCALEndcapLayer(layer);
+	} else {
+	    // No solution
+	    return null;
+	}
+    }
+
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToECALBarrelLayer(int layer) {
+	return extendToBarrelLayer(layer, m_ECAL_barrel_layering_r, m_ECAL_barrel_zmin, m_ECAL_barrel_zmax, m_ECAL_barrel_nsides);
+    }
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToECALEndcapLayer(int layer) {
+	if (m_useFCAL && m_interceptsFCAL) {
+	    Hep3Vector vec = extendToEndcapLayer(layer, m_FCAL_endcap_layering_z, m_FCAL_endcap_rmin, m_FCAL_endcap_rmax, m_FCAL_endcap_nsides);
+	    if (vec != null) {
+		return vec;
+	    }
+	}
+	return extendToEndcapLayer(layer, m_ECAL_endcap_layering_z, m_ECAL_endcap_rmin, m_ECAL_endcap_rmax, m_ECAL_endcap_nsides);
+    }
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToHCALBarrelLayer(int layer) {
+	return extendToBarrelLayer(layer, m_HCAL_barrel_layering_r, m_HCAL_barrel_zmin, m_HCAL_barrel_zmax, m_HCAL_barrel_nsides);
+    }
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToHCALEndcapLayer(int layer) {
+	return extendToEndcapLayer(layer, m_HCAL_endcap_layering_z, m_HCAL_endcap_rmin, m_HCAL_endcap_rmax, m_HCAL_endcap_nsides);
+    }
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToMCALBarrelLayer(int layer) {
+	return extendToBarrelLayer(layer, m_MCAL_barrel_layering_r, m_MCAL_barrel_zmin, m_MCAL_barrel_zmax, m_MCAL_barrel_nsides);
+    }
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector extendToMCALEndcapLayer(int layer) {
+	return extendToEndcapLayer(layer, m_MCAL_endcap_layering_z, m_MCAL_endcap_rmin, m_MCAL_endcap_rmax, m_MCAL_endcap_nsides);
+    }
+}
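
For reference, the barrel sampling radii cached in initGeometry() all follow
the pattern below (a restatement of the logic above, not new behavior):
polygonal barrels measure layering distances from the inner face, while a
CylindricalBarrelCalorimeter reports absolute radii, hence the rmin -> 0
special case. Here iLayer stands for the loop index used above.

    Subdetector s = ci.getSubdetector(CalorimeterType.EM_BARREL);
    double r0 = (s instanceof CylindricalBarrelCalorimeter)
            ? 0.0
            : ci.getRMin(CalorimeterType.EM_BARREL);
    double layerR = r0 + s.getLayering().getDistanceToLayerSensorMid(iLayer);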

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
LocalHelixExtrapolationTrackClusterMatcher.java added at 1.1
diff -N LocalHelixExtrapolationTrackClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ LocalHelixExtrapolationTrackClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,211 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.util.decision.*;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.Track;
+import org.lcsim.util.Driver;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+
+/**
+ * Attempt to match a Track to a Cluster. The Track is extrapolated to the inner surface
+ * of the ECAL with a helix derived from nearby tracker hits. We use truth information
+ * to find the tracker hits, so we need to look up truth information for the Track.
+ *
+ * Certain cuts are hard-coded, but additional cuts can be specified via a
+ * DecisionMakerPair<Track,Cluster>.
+ */
+
+
+public class LocalHelixExtrapolationTrackClusterMatcher extends Driver implements TrackClusterMatcher
+{
+    protected boolean init;
+    protected CalorimeterInformation ci;
+    /** Simple constructor. */
+    public LocalHelixExtrapolationTrackClusterMatcher(HelixExtrapolator extrap) {
+	super();
+	m_extrap = extrap;
+	init = false;
+    }
+
+    /** Constructor, specifying additional cut to apply. */
+    public LocalHelixExtrapolationTrackClusterMatcher(DecisionMakerPair<Track,Cluster> extraCut, HelixExtrapolator extrap) { 
+	super();
+	m_extraCut = extraCut;
+	m_extrap = extrap;
+	init = false;
+    }
+
+    /** Another way to specify the additional cut. */
+    public void setExtraCheck(DecisionMakerPair<Track,Cluster> extraCut) { 
+	m_extraCut = extraCut;
+    }
+
+    /** Process this event, storing geometry info if needed. */
+    public void process(EventHeader event) {
+        if(!init)
+        {
+            ci = CalorimeterInformation.instance();
+            init = true;
+        }
+	m_event = event;
+	m_extrap.process(event);
+    }
+
+    /**
+      * Match this track to a cluster from the list supplied.
+      * Returns the best matched cluster, or null if there is no
+      * acceptable match.
+      */
+    public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters) {
+        if(!init)
+        {
+            ci = CalorimeterInformation.instance();
+            init = true;
+        }
+	if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+" trying to match track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" to a list of "+clusters.size()+" clusters."); }
+	HelixExtrapolationResult result = m_extrap.performExtrapolation(tr);
+	Hep3Vector point = null;
+	if (result != null) {
+	    point = result.getInterceptPoint();
+	}
+	if (point != null) {
+	    // Extrapolated to ECAL OK.
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": extrapolated track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" to r="+(Math.sqrt(point.x()*point.x()+point.y()*point.y()))+", z="+point.z()); }
+	    // Look through clusters, starting with the nearest ones:
+	    List<Cluster> nearestClusters = findNearestClusters(point, clusters);
+	    for (Cluster nearbyCluster : nearestClusters) {
+		double separation = proximity(point, nearbyCluster);
+		if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": comparing track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" to a cluster of "+nearbyCluster.getCalorimeterHits().size()+" hits at a separation of "+separation); }
+		if (separation > m_cutSeparation) {
+		    // This cluster (and therefore all subsequent ones) are too far away to pass
+		    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": for track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+", remaining clusters are too far away (cut="+m_cutSeparation+") => no match."); }
+		    break;
+		} else {
+		    // Separation OK. Next, check that the cluster has a hit in the first n layers of the ECAL:
+		    CalorimeterHit firstHitInECAL = findInnermostHitInECAL(nearbyCluster);
+		    if (firstHitInECAL != null && getVLayer(firstHitInECAL) < m_cutFirstLayer) {
+			if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": comparing track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" to a cluster of "+nearbyCluster.getCalorimeterHits().size()+" hits at a separation of "+separation+": First hit in ECAL is in layer "+getVLayer(firstHitInECAL)+" -- OK!"); }
+			// First hit layer OK.
+			if (m_extraCut == null || m_extraCut.valid(tr,nearbyCluster) ) {
+			    // Extra cut not specified or passed
+			    // All cuts passed.
+			    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": comparing track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" to a cluster of "+nearbyCluster.getCalorimeterHits().size()+" hits at a separation of "+separation+": All cuts passed => accept"); }
+			    return nearbyCluster;
+			} else {
+			    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": comparing track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" to a cluster of "+nearbyCluster.getCalorimeterHits().size()+" hits at a separation of "+separation+": Failed extra cut ("+m_extraCut.getClass().getName()+")"); }
+			}
+		    } else {
+			if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": comparing track with p="+(new BasicHep3Vector(tr.getMomentum())).magnitude()+" to a cluster of "+nearbyCluster.getCalorimeterHits().size()+" hits at a separation of "+separation+": Failed to find hit in ECAL layer < "+m_cutFirstLayer); }
+		    }
+		}
+	    }
+	} else {
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+": failed to extrapolate track => no match"); }
+	}
+	// No valid match
+	return null;
+    }
+
+    public void setCutSeparation(double cutSeparation) { m_cutSeparation = cutSeparation; m_extrap.setCutSeparation(cutSeparation); }
+    public void setCutFirstLayer(int cutFirstLayer) { m_cutFirstLayer = cutFirstLayer; }
+
+    public HelixExtrapolator getExtrapolator() { return m_extrap; }
+    public void setExtrapolator(HelixExtrapolator extrap) { m_extrap = extrap; }
+
+    protected DecisionMakerPair<Track,Cluster> m_extraCut = null;
+    protected double m_cutSeparation = 30.0; // 30 mm = 3 cm
+    protected int    m_cutFirstLayer = 5; // Cluster must have a hit in layer 0,1,2,3, or 4.
+    protected EventHeader m_event;
+    protected HelixExtrapolator m_extrap = null;
+
+    // Utility routines
+    // ----------------
+
+    protected List<Cluster> findNearestClusters(Hep3Vector point, List<Cluster> clusterList) {
+	Map<Cluster,Double> mapClusterToDistance = new HashMap<Cluster, Double>();
+	List<Cluster> sortedListOfClusters = new Vector<Cluster>();
+	for (Cluster clus : clusterList) {
+	    double dist = proximity(point, clus);
+	    mapClusterToDistance.put(clus, new Double(dist));
+	    sortedListOfClusters.add(clus);
+	}
+	Comparator<Cluster> comp = new CompareMapping<Cluster>(mapClusterToDistance);
+	Collections.sort(sortedListOfClusters, comp);
+	return sortedListOfClusters;
+    }
+    protected CalorimeterHit findInnermostHitInECAL(Cluster clus) {
+	CalorimeterHit innermostHit = null;
+	for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+	    int layer = getVLayer(hit);
+	    org.lcsim.geometry.Subdetector subdet = hit.getSubdetector();	    
+            if ( ! subdet.isCalorimeter() ) { throw new AssertionError("Cluster hit outside calorimeter"); }
+            String name = subdet.getName();
+            if (name.compareTo(ci.getName(CalorimeterType.EM_BARREL)) == 0 || name.compareTo(ci.getName(CalorimeterType.EM_ENDCAP)) == 0) {
+                // EM -- OK
+		if (innermostHit==null || getVLayer(innermostHit)>layer) {
+		    innermostHit = hit;
+		}
+	    }
+	}
+	return innermostHit;
+    }
+    protected double proximity(Hep3Vector point, Cluster clus) {
+	CalorimeterHit nearestHit = findNearestHit(point, clus);
+	return proximity(point, nearestHit);
+    }
+    protected CalorimeterHit findNearestHit(Hep3Vector point, Cluster clus) {
+	CalorimeterHit nearest = null;
+	double minDist = 0;
+	for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+	    Hep3Vector hitPosition = new BasicHep3Vector(hit.getPosition());
+	    double distance = VecOp.sub(hitPosition, point).magnitude();
+	    if (distance<minDist || nearest==null) {
+		nearest = hit;
+		minDist = distance;
+	    }
+	}
+	return nearest;
+    }
+    protected double proximity(Hep3Vector point, CalorimeterHit hit) {
+	Hep3Vector hitPosition = new BasicHep3Vector(hit.getPosition());
+	double distance = VecOp.sub(hitPosition, point).magnitude();
+	return distance;
+    }
+    protected int getVLayer(CalorimeterHit hit) {
+	org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
+	id.setID(hit.getCellID());
+	int layer = id.getVLayer();
+	return layer;
+    }
+
+    private class CompareMapping<T> implements Comparator<T> {
+	public CompareMapping(Map<T,Double> map) {
+	    m_map = map;
+	}
+	public int compare(T o1, T o2) {
+	    // Order by mapped distance; Double.compareTo gives the same
+	    // ordering as the explicit three-way comparison it replaces.
+	    Double d1 = m_map.get(o1);
+	    Double d2 = m_map.get(o2);
+	    return d1.compareTo(d2);
+	}
+	Map<T,Double> m_map;
+    }
+
+    protected boolean m_debug = false;
+    public void setDebug(boolean debug) { m_debug = debug ; }
+}
+
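
Typical configuration of the matcher above, restating the documented defaults
(the values shown are the class defaults, not tuned recommendations):

    LocalHelixExtrapolationTrackClusterMatcher matcher =
        new LocalHelixExtrapolationTrackClusterMatcher(new LocalHelixExtrapolator());
    matcher.setCutSeparation(30.0); // mm: max intercept-to-nearest-hit distance
    matcher.setCutFirstLayer(5);    // require an ECAL hit in layers 0-4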

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
LocalHelixExtrapolationTrackMIPClusterMatcher.java added at 1.1
diff -N LocalHelixExtrapolationTrackMIPClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ LocalHelixExtrapolationTrackMIPClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,83 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.util.decision.*;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.recon.cluster.util.BasicCluster;
+import org.lcsim.recon.cluster.util.TensorClusterPropertyCalculator;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Track;
+import org.lcsim.event.SimTrackerHit;
+import org.lcsim.event.SimCalorimeterHit;
+
+public class LocalHelixExtrapolationTrackMIPClusterMatcher extends LocalHelixExtrapolationTrackClusterMatcher
+{
+    public LocalHelixExtrapolationTrackMIPClusterMatcher(HelixExtrapolator extrap) {
+	super(extrap);
+    }
+
+    public LocalHelixExtrapolationTrackMIPClusterMatcher(DecisionMakerPair<Track,Cluster> extraCut, HelixExtrapolator extrap) { 
+	super(extraCut, extrap);
+    }
+
+    public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters) {
+	HelixExtrapolationResult result = m_extrap.performExtrapolation(tr);
+	Hep3Vector point = null;
+	if (result != null) { point = result.getInterceptPoint(); }
+	if (point != null) {
+	    // Extrapolated to ECAL OK.
+	    // Now, what is the tangent at that point?
+	    Hep3Vector tangent = m_extrap.getTangent();
+	    // Look through clusters, starting with the nearest ones:
+	    List<Cluster> nearestClusters = findNearestClusters(point, clusters);
+	    for (Cluster nearbyCluster : nearestClusters) {
+		double separation = proximity(point, nearbyCluster);
+		if (separation > m_cutSeparation) {
+		    // This cluster (and therefore all subsequent ones) are too far away to pass
+		    break;
+		} else {
+		    // Separation OK. Next, check that the cluster has a hit in the first n layers of the ECAL:
+		    CalorimeterHit firstHitInECAL = findInnermostHitInECAL(nearbyCluster);
+		    if (firstHitInECAL != null && getVLayer(firstHitInECAL) < m_cutFirstLayer) {
+			// First hit layer OK. Next, check the dot-product of directions:
+			double unitDotProduct = findUnitDotProduct(tangent, nearbyCluster);
+			if (Math.abs(unitDotProduct) > m_cutDotProduct) {
+			    // Dot product OK. Next, check extra cut if specified:
+			    if (m_extraCut == null || m_extraCut.valid(tr,nearbyCluster) ) {
+				// Extra cut not specified or passed
+				// All cuts passed.
+				return nearbyCluster;
+			    }
+			}
+		    }
+		}
+	    }
+	}	
+	// No valid match
+	return null;
+    }
+    
+    protected double findUnitDotProduct(Hep3Vector tangent, Cluster clus) {
+	// Find the cluster direction
+	BasicCluster copy = new BasicCluster();
+	copy.addCluster(clus);
+	TensorClusterPropertyCalculator calc = new TensorClusterPropertyCalculator();
+	copy.setPropertyCalculator(calc);
+	copy.calculateProperties();
+	double[][]axes = calc.getPrincipleAxis();
+	Hep3Vector clusterDir = new BasicHep3Vector(axes[0][0], axes[0][1], axes[0][2]);
+	// Get the dot product:
+	double unitDotProduct = VecOp.dot(tangent, clusterDir) / (tangent.magnitude() * clusterDir.magnitude());
+	if (m_debug) { 
+	    System.out.println("DEBUG: LocalHelixExtrapolationTrackMIPClusterMatcher: Computed dot product as clusterDir=("+clusterDir.x()+", "+clusterDir.y()+", "+clusterDir.z()+"), tangent=("+tangent.x()+", "+tangent.y()+", "+tangent.z()+") using "+copy.getCalorimeterHits().size()+" hits.");
+	}
+	return unitDotProduct;
+    }
+
+    protected double m_cutDotProduct = 0.85;
+}
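
The cut this subclass adds on top of the base matcher is the direction check
in findUnitDotProduct(): with the default m_cutDotProduct of 0.85, the track
tangent and the cluster's principal axis must agree to within roughly 32
degrees, up to the sign ambiguity of the principal axis. The cut in isolation
(sketch, given the two direction vectors from the code above):

    double cosAngle = VecOp.dot(tangent, clusterDir)
            / (tangent.magnitude() * clusterDir.magnitude());
    boolean directionOK = Math.abs(cosAngle) > 0.85; // sign-blind |cos| cut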

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
LocalHelixExtrapolator.java added at 1.1
diff -N LocalHelixExtrapolator.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ LocalHelixExtrapolator.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,462 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.util.decision.*;
+
+import hep.physics.particle.Particle;
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
+import org.lcsim.geometry.Detector;
+import org.lcsim.recon.cluster.util.BasicCluster;
+import org.lcsim.recon.cluster.util.TensorClusterPropertyCalculator;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.Track;
+import org.lcsim.util.Driver;
+import org.lcsim.event.MCParticle;
+import org.lcsim.mc.fast.tracking.ReconTrack;
+import org.lcsim.event.SimCalorimeterHit;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackClusterMatcher;
+import org.lcsim.event.SimTrackerHit;
+import org.lcsim.mc.fast.tracking.ReconTrack;
+import org.lcsim.event.base.*;
+import org.lcsim.geometry.IDDecoder;
+
+/**
+ * Support class for local helix extrapolation, used to estimate
+ * intercept point of simulated track on an arbitrary ECAL layer.
+ * This will at some point be replaced by full tracking so it's
+ * not a good idea to depend heavily on it.
+ *
+ */
+
+public class LocalHelixExtrapolator extends HelixExtrapolator
+{
+    double m_trackParam_xc = 0.0;
+    double m_trackParam_yc = 0.0;
+    double m_trackParam_radius = 0.0;
+    double m_trackParam_dz_by_dphi = 0.0;
+    double m_trackPoint_z = 0.0;
+    double m_trackPoint_phi = 0.0;
+    double m_track_z0 = 0.0;
+    double m_track_phi0 = 0.0;
+    double m_track_phi1 = 0.0;
+    boolean m_track_dphi_negative = false;
+
+    boolean m_debugChargeFlip = false;
+
+    // ONLY "performExtrapolation" is allowed to modify this!
+    private BasicHep3Vector m_lastInterceptPoint = null;
+
+    public LocalHelixExtrapolator() {
+	super();
+    }
+
+    protected LocalHelixExtrapolator(LocalHelixExtrapolator old) {
+	super(old);
+	m_trackParam_xc = old.m_trackParam_xc;
+	m_trackParam_yc = old.m_trackParam_yc;
+	m_trackParam_radius = old.m_trackParam_radius;
+	m_trackParam_dz_by_dphi = old.m_trackParam_dz_by_dphi;
+	m_trackPoint_z = old.m_trackPoint_z;
+	m_trackPoint_phi = old.m_trackPoint_phi;
+	m_track_z0 = old.m_track_z0;
+	m_track_phi0 = old.m_track_phi0;
+	m_track_phi1 = old.m_track_phi1;
+	m_track_dphi_negative = old.m_track_dphi_negative;
+	m_debugChargeFlip = old.m_debugChargeFlip;
+	if (old.m_lastInterceptPoint == null) {
+	    m_lastInterceptPoint = null;
+	} else {
+	    m_lastInterceptPoint = new BasicHep3Vector(old.m_lastInterceptPoint.x(), old.m_lastInterceptPoint.y(), old.m_lastInterceptPoint.z());
+	}
+    }
+
+    public void process(EventHeader event) {
+	super.process(event);
+    }
+
+    // Utility routines
+    // ----------------
+
+    protected Hep3Vector getInterceptPoint() {
+	if (m_lastInterceptPoint == null) {
+	    return null;
+	} else {
+	    return new BasicHep3Vector(m_lastInterceptPoint.x(), m_lastInterceptPoint.y(), m_lastInterceptPoint.z());
+	}
+    }
+
+    public HelixExtrapolationResult performExtrapolation(Track tr) {
+	m_track = tr;
+	if (tr == null) {
+	    // Null track -- blank everything and return failure
+	    m_trackParam_xc = m_trackParam_yc = m_trackParam_radius = m_trackParam_dz_by_dphi = m_trackPoint_z = m_trackPoint_phi = m_track_z0 = m_track_phi0 = m_track_phi1 = 0.0;
+	    return null;
+	}
+
+	// Sanity check -- check for a specific case where we try to
+	// extrapolate an unphysical track of class MultipleTrackTrack.
+	{
+	    Class cl = tr.getClass();
+	    String className = cl.getName();
+	    boolean fake = (className.contains("MultipleTrackTrack"));
+	    if (fake) { throw new AssertionError("Tried to do extrapolation of bogus track!"); }
+	}
+	
+	// Find hits. For now these are SimTrackerHits because
+	// of the inability to get hold of TrackerHits.
+	Collection<SimTrackerHit> trackerHits = findHits(tr);
+
+	if (trackerHits.size() < 3) { 
+	    // Need at least 3 hits to make a helix.
+	    m_lastInterceptPoint = null;
+	    return null;
+	}
+	// Hit 0 is the hit with the largest |z|, i.e. the one taken as closest to the calorimeter (note the reversed numbering!)
+	SimTrackerHit hit0 = null;
+	SimTrackerHit hit1 = null;
+	SimTrackerHit hit2 = null;
+	for (SimTrackerHit hit : trackerHits) {
+	    if (hit0==null || Math.abs(hit.getPoint()[2])>Math.abs(hit0.getPoint()[2])) {
+		hit2 = hit1;
+		hit1 = hit0;
+		hit0 = hit;
+	    } else if (hit1==null || Math.abs(hit.getPoint()[2])>Math.abs(hit1.getPoint()[2])) {
+		hit2 = hit1;
+		hit1 = hit;
+	    } else if (hit2==null || Math.abs(hit.getPoint()[2])>Math.abs(hit2.getPoint()[2])) {
+		hit2 = hit;
+	    }
+	}
+
+	// First look at the xy (circle) projection
+	double x0 = hit0.getPoint()[0];
+	double x1 = hit1.getPoint()[0];
+	double x2 = hit2.getPoint()[0];
+	double y0 = hit0.getPoint()[1];
+	double y1 = hit1.getPoint()[1];
+	double y2 = hit2.getPoint()[1];
+	double a1 = 2.0 * (x1-x0);
+	double a2 = 2.0 * (x2-x0);
+	double b1 = 2.0 * (y1-y0);
+	double b2 = 2.0 * (y2-y0);
+	double c1 = -x0*x0 -y0*y0 +x1*x1 +y1*y1;
+	double c2 = -x0*x0 -y0*y0 +x2*x2 +y2*y2;
+	// Circle center is at (x_c, y_c)
+	double x_c = (c1*b2-c2*b1)/(a1*b2-a2*b1);
+	double y_c = (c1*a2-c2*a1)/(b1*a2-b2*a1);
+	// Circle radius
+	double radiusSquared = (x_c-x0)*(x_c-x0) + (y_c-y0)*(y_c-y0);
+	double radius = Math.sqrt(radiusSquared);
+	// Now look at z/phi projection
+	// Watch out, this is phi around the circle, not the azimuthal angle phi
+	double z0 = hit0.getPoint()[2];
+	double z1 = hit1.getPoint()[2];
+	double z2 = hit2.getPoint()[2];
+	double phi0 = Math.atan2(y0-y_c, x0-x_c); // in the range -pi through +pi
+	double phi1 = Math.atan2(y1-y_c, x1-x_c);
+	double phi2 = Math.atan2(y2-y_c, x2-x_c);
+	double dz = (z0-z1);
+	double dphi = (phi0-phi1);
+	while (dphi < -Math.PI) { dphi += 2.0*Math.PI; }
+	while (dphi > Math.PI) { dphi -= 2.0*Math.PI; }
+	double dz_by_dphi = dz/dphi;
+	m_track_dphi_negative = (dphi < 0);
+	// Now, try to project along to the endcaps (fairly straightforward)
+	double dz_to_endcap = Math.abs(m_ECAL_endcap_z) - z0;
+	if (z0 < 0) {
+	    dz_to_endcap = -Math.abs(m_ECAL_endcap_z) - z0;
+	}
+	double dphi_to_endcap = dz_to_endcap / dz_by_dphi;
+	double found_endcap_z = z0 + dz_to_endcap;
+	double found_endcap_phi = phi0 + dphi_to_endcap;
+	double found_endcap_x = x_c + radius * Math.cos(found_endcap_phi);
+	double found_endcap_y = y_c + radius * Math.sin(found_endcap_phi);
+	double found_endcap_polar_r = Math.sqrt(found_endcap_x*found_endcap_x + found_endcap_y*found_endcap_y);
+	double found_endcap_polar_phi = Math.atan2(found_endcap_y, found_endcap_x);
+	m_endcapValid = (found_endcap_polar_r >= m_ECAL_endcap_rmin-m_cutSeparation && found_endcap_polar_r <= m_ECAL_endcap_rmax+m_cutSeparation);
+	m_interceptsFCAL = false; // reset the base-class field (declaring a local boolean here would shadow it)
+	if (m_useFCAL) {
+	    m_endcapValid = m_endcapValid || (found_endcap_polar_r >= m_FCAL_endcap_rmin-m_cutSeparation && found_endcap_polar_r <= m_FCAL_endcap_rmax+m_cutSeparation);
+	    m_interceptsFCAL = (found_endcap_polar_r >= m_FCAL_endcap_rmin-m_cutSeparation && found_endcap_polar_r <= m_FCAL_endcap_rmax);
+	}
+	// Now project along to the barrel (harder!)
+	// We have phi such that (x-x_c)=a*cos(phi), (y-y_c)=a*sin(phi)
+	// Define theta such that x_c = b*cos(theta), y_c = b*sin(theta)
+	double a = radius;
+	double b = Math.sqrt(x_c*x_c + y_c*y_c);
+	double r = m_ECAL_barrel_r; // barrel radius
+	double cos_phi_minus_theta = (r*r - a*a - b*b) / (2.0*a*b); // Obviously, this blows up if a or b is zero
+	double theta = Math.atan2(y_c, x_c); // in the range (-pi, +pi)
+	double dphi_to_barrel = 0.0;
+	m_barrelValid = false;
+	if (cos_phi_minus_theta < -1.0) {
+	    // No solution
+	} else if (cos_phi_minus_theta == -1.0) {
+	    // Unique solution: phi = theta + pi
+	    dphi_to_barrel = theta + Math.PI - phi0;
+	    while (dphi_to_barrel < -Math.PI) { dphi_to_barrel += 2.0*Math.PI; }
+	    while (dphi_to_barrel > Math.PI) { dphi_to_barrel -= 2.0*Math.PI; }
+	    m_barrelValid = true;
+	} else if (cos_phi_minus_theta == 1.0) {
+	    // Unique solution: phi = theta
+	    dphi_to_barrel = theta - phi0;
+	    while (dphi_to_barrel < -Math.PI) { dphi_to_barrel += 2.0*Math.PI; }
+	    while (dphi_to_barrel > Math.PI) { dphi_to_barrel -= 2.0*Math.PI; }
+	    m_barrelValid = true;
+	} else if (cos_phi_minus_theta > 1.0) {
+	    // No solution
+	} else {
+	    // Two solutions
+	    double phi_minus_theta_first_solution = Math.acos(cos_phi_minus_theta); // in the range 0 through pi
+	    double phi_minus_theta_second_solution = -phi_minus_theta_first_solution; // in the range -pi through 0
+	    double phi_first_solution = phi_minus_theta_first_solution + theta; // in the range (-pi, 2pi)
+	    double phi_second_solution = phi_minus_theta_second_solution + theta; // in the range (-2pi, pi)
+	    double dphi_to_barrel_firstSolution = phi_first_solution - phi0;
+	    double dphi_to_barrel_secondSolution = phi_second_solution - phi0;
+	    while (dphi_to_barrel_firstSolution < -Math.PI) { dphi_to_barrel_firstSolution += 2.0*Math.PI; }
+	    while (dphi_to_barrel_secondSolution < -Math.PI) { dphi_to_barrel_secondSolution += 2.0*Math.PI; }
+	    while (dphi_to_barrel_firstSolution > Math.PI) { dphi_to_barrel_firstSolution -= 2.0*Math.PI; }
+	    while (dphi_to_barrel_secondSolution > Math.PI) { dphi_to_barrel_secondSolution -= 2.0*Math.PI; }
+	    // OK, now which of the two solutions is better?
+	    double test_dphi1 = dphi_to_barrel_firstSolution * dphi;
+	    double test_dphi2 = dphi_to_barrel_secondSolution * dphi;
+	    while (test_dphi1 < 0) { test_dphi1 += 2.0*Math.PI; }
+	    while (test_dphi2 < 0) { test_dphi2 += 2.0*Math.PI; }
+	    if (test_dphi1 < test_dphi2) {
+		dphi_to_barrel = dphi_to_barrel_firstSolution;
+	    } else {
+		dphi_to_barrel = dphi_to_barrel_secondSolution;
+	    }
+	    while (dphi_to_barrel < -Math.PI) { dphi_to_barrel += 2.0*Math.PI; }
+	    while (dphi_to_barrel > Math.PI) { dphi_to_barrel -= 2.0*Math.PI; }		    
+	    m_barrelValid = true;
+	}
+	double found_barrel_z = z0 + dz_by_dphi * dphi_to_barrel;
+	double found_barrel_phi = phi0 + dphi_to_barrel;
+	double found_barrel_x = x_c + radius * Math.cos(found_barrel_phi);
+	double found_barrel_y = y_c + radius * Math.sin(found_barrel_phi);
+	double found_barrel_polar_r = Math.sqrt(found_barrel_x*found_barrel_x + found_barrel_y*found_barrel_y);
+	double found_barrel_polar_phi = Math.atan2(found_barrel_y, found_barrel_x);
+	m_barrelValid = m_barrelValid && (found_barrel_z >= m_ECAL_barrel_zmin-m_cutSeparation && found_barrel_z <= m_ECAL_barrel_zmax+m_cutSeparation);
+	
+	if (m_barrelValid && m_endcapValid) {
+	    // Weird case: two possible solutions
+	    // Look at which is closer to last tracker hit.
+	    double distanceToBarrelSq = (x0-found_barrel_x)*(x0-found_barrel_x) + (y0-found_barrel_y)*(y0-found_barrel_y) + (z0-found_barrel_z)*(z0-found_barrel_z);
+	    double distanceToEndcapSq = (x0-found_endcap_x)*(x0-found_endcap_x) + (y0-found_endcap_y)*(y0-found_endcap_y) + (z0-found_endcap_z)*(z0-found_endcap_z);
+	    if (distanceToBarrelSq<distanceToEndcapSq) {
+		m_endcapValid = false;
+	    } else {
+		m_barrelValid = false;
+	    }
+	}
+
+	m_trackParam_xc = x_c;
+	m_trackParam_yc = y_c;
+	m_trackParam_radius = radius;
+	m_trackParam_dz_by_dphi = dz_by_dphi;
+	m_track_phi0 = phi0;
+	m_track_phi1 = phi1;
+	m_track_z0 = z0;
+	if (m_endcapValid) {
+	    m_trackPoint_z = found_endcap_z;
+	    m_trackPoint_phi = found_endcap_phi;
+	    m_lastInterceptPoint = new BasicHep3Vector(found_endcap_x, found_endcap_y, found_endcap_z);
+	    HelixExtrapolationResult output = new HelixExtrapolationResult(new LocalHelixExtrapolator(this));
+	    if (output.getInterceptPoint() == null) { throw new AssertionError("Successful extrapolation, but intercept point is null!"); }
+	    return output;
+	}
+	if (m_barrelValid) {
+	    m_trackPoint_z = found_barrel_z;
+	    m_trackPoint_phi = found_barrel_phi;
+	    m_lastInterceptPoint = new BasicHep3Vector(found_barrel_x, found_barrel_y, found_barrel_z);
+	    HelixExtrapolationResult output = new HelixExtrapolationResult(new LocalHelixExtrapolator(this));
+	    if (output.getInterceptPoint() == null) { throw new AssertionError("Successful extrapolation, but intercept point is null!"); }
+	    return output;
+	}
+
+	// No solution
+	m_lastInterceptPoint = null;
+	return null;
+    }
+
+    private Collection<SimTrackerHit> findHits(Track tr) {
+	// Find truth particle of track:
+	MCParticle truth = null;
+	if (tr instanceof org.lcsim.mc.fast.tracking.ReconTrack) {
+	    org.lcsim.mc.fast.tracking.ReconTrack reconTr = (org.lcsim.mc.fast.tracking.ReconTrack) (tr);
+	    truth = (MCParticle)(reconTr.getMCParticle());
+	} else if (tr instanceof BaseTrackMC) {
+	    truth = ((BaseTrackMC)(tr)).getMCParticle();
+	}
+	// Look up all hits in event:
+	List<SimTrackerHit> trackerHits = new Vector<SimTrackerHit>();
+	trackerHits.addAll(m_event.get(org.lcsim.event.SimTrackerHit.class, "TkrBarrHits"));
+	trackerHits.addAll(m_event.get(org.lcsim.event.SimTrackerHit.class, "TkrEndcapHits"));
+	// Find hits that match track:
+	List<SimTrackerHit> hitsMatched = new Vector<org.lcsim.event.SimTrackerHit>();
+	for (SimTrackerHit hit : trackerHits) {
+	    if (hit.getMCParticle() == truth) {
+		hitsMatched.add(hit);
+	    }
+	}
+	return hitsMatched;
+    }
+
+    /** Assumes extrapolation has already been done. */
+    protected Hep3Vector getTangent() {
+	double dphi = 0.01;
+	if (m_track.getCharge() > 0.0) { dphi = -0.01; }
+	double dx = m_trackParam_radius * ( Math.cos(m_trackPoint_phi+dphi) - Math.cos(m_trackPoint_phi) );
+	double dy = m_trackParam_radius * ( Math.sin(m_trackPoint_phi+dphi) - Math.sin(m_trackPoint_phi) );
+	double dz = m_trackParam_dz_by_dphi * dphi;
+	Hep3Vector tangent = VecOp.unit(new BasicHep3Vector(dx,dy,dz));
+	return tangent;
+    }
+
+    protected Hep3Vector getTangent(Hep3Vector v){
+
+        double x0 = v.x();
+        double y0 = v.y();
+        double z0 = v.z();
+
+        double phi0 = Math.atan2(y0-m_trackParam_yc, x0-m_trackParam_xc); // in the range -pi through +pi
+
+        if(m_debug){
+            Hep3Vector P = new BasicHep3Vector(m_track.getMomentum());
+            System.out.println("Center of radius= " + m_trackParam_xc + " " + m_trackParam_yc);                             
+            System.out.println("This position= " + x0 + " " + y0 + " " + z0);
+            System.out.println("phi at this point= " + phi0);
+            Double pT = Math.sqrt(P.x()*P.x()+P.y()*P.y());
+            double zField = m_event.getDetector().getFieldMap().getField(P).z();
+            double r = 1000*pT/(0.3*zField);
+            System.out.println("R from track = " + m_trackParam_radius);
+            System.out.println("R from p/0.3B=  " + r + " magnetic field= " + zField);
+        }
+
+        if(m_track_dphi_negative && m_track.getCharge() < 0.0) {
+	    if (m_debugChargeFlip || m_debug) {
+		System.out.println("Error: rotational direction is wrong");
+		System.out.println("Charge is negative but it is determined to turn clockwise");
+	    }
+	}
+
+        double dphi = 0.01;
+	if (m_track.getCharge() > 0.0) { dphi = -0.01; }
+        double dx = m_trackParam_radius * ( Math.cos(phi0+dphi) - Math.cos(phi0) );
+        double dy = m_trackParam_radius * ( Math.sin(phi0+dphi) - Math.sin(phi0) );
+        double dz = m_trackParam_dz_by_dphi * dphi;
+
+        Hep3Vector tangent = VecOp.unit(new BasicHep3Vector(dx,dy,dz));
+        return tangent;
+    }
+
+    protected Hep3Vector extendToBarrelLayer(int layer, Vector<Double> barrel_layering_r, double barrel_zmin, double barrel_zmax, int nsides )
+    {
+	if (!m_barrelValid && !m_endcapValid) { return null; }
+
+	double dphi = (m_track_phi0 - m_track_phi1);
+	while (dphi < -Math.PI) { dphi += 2.0*Math.PI; }
+	while (dphi > Math.PI) { dphi -= 2.0*Math.PI; }
+	double x_c = m_trackParam_xc;
+	double y_c = m_trackParam_yc;
+	double radius = m_trackParam_radius;
+	double dz_by_dphi = m_trackParam_dz_by_dphi;
+
+	double a = radius;
+	double b = Math.sqrt(x_c*x_c + y_c*y_c);
+	double r =  barrel_layering_r.get(layer);
+	double cos_phi_minus_theta = (r*r - a*a - b*b) / (2.0*a*b); // Obviously, this blows up if a or b is zero
+	double theta = Math.atan2(y_c, x_c); // in the range (-pi, +pi)
+	double dphi_to_barrel = 0.0;
+	if (cos_phi_minus_theta < -1.0) {
+	    // No solution
+	    return null;
+	} else if (cos_phi_minus_theta == -1.0) {
+	    // Unique solution: phi = theta + pi
+	    dphi_to_barrel = theta + Math.PI - m_track_phi0;
+	    while (dphi_to_barrel < -Math.PI) { dphi_to_barrel += 2.0*Math.PI; }
+	    while (dphi_to_barrel > Math.PI) { dphi_to_barrel -= 2.0*Math.PI; }
+	} else if (cos_phi_minus_theta == 1.0) {
+	    // Unique solution: phi = theta
+	    dphi_to_barrel = theta - m_track_phi0;
+	    while (dphi_to_barrel < -Math.PI) { dphi_to_barrel += 2.0*Math.PI; }
+	    while (dphi_to_barrel > Math.PI) { dphi_to_barrel -= 2.0*Math.PI; }
+	} else if (cos_phi_minus_theta > 1.0) {
+	    // No solution
+	    return null;
+	} else {
+	    // Two solutions
+	    double phi_minus_theta_first_solution = Math.acos(cos_phi_minus_theta); // in the range 0 through pi
+	    double phi_minus_theta_second_solution = -phi_minus_theta_first_solution; // in the range -pi through 0
+	    double phi_first_solution = phi_minus_theta_first_solution + theta; // in the range (-pi, 2pi)
+	    double phi_second_solution = phi_minus_theta_second_solution + theta; // in the range (-2pi, pi)
+	    double dphi_to_barrel_firstSolution = phi_first_solution - m_track_phi0;
+	    double dphi_to_barrel_secondSolution = phi_second_solution - m_track_phi0;
+	    while (dphi_to_barrel_firstSolution < -Math.PI) { dphi_to_barrel_firstSolution += 2.0*Math.PI; }
+	    while (dphi_to_barrel_secondSolution < -Math.PI) { dphi_to_barrel_secondSolution += 2.0*Math.PI; }
+	    while (dphi_to_barrel_firstSolution > Math.PI) { dphi_to_barrel_firstSolution -= 2.0*Math.PI; }
+	    while (dphi_to_barrel_secondSolution > Math.PI) { dphi_to_barrel_secondSolution -= 2.0*Math.PI; }
+	    // OK, now which of the two solutions is better?
+	    double test_dphi1 = dphi_to_barrel_firstSolution * dphi;
+	    double test_dphi2 = dphi_to_barrel_secondSolution * dphi;
+	    while (test_dphi1 < 0) { test_dphi1 += 2.0*Math.PI; }
+	    while (test_dphi2 < 0) { test_dphi2 += 2.0*Math.PI; }
+	    if (test_dphi1 < test_dphi2) {
+		dphi_to_barrel = dphi_to_barrel_firstSolution;
+	    } else {
+		dphi_to_barrel = dphi_to_barrel_secondSolution;
+	    }
+	    while (dphi_to_barrel < -Math.PI) { dphi_to_barrel += 2.0*Math.PI; }
+	    while (dphi_to_barrel > Math.PI) { dphi_to_barrel -= 2.0*Math.PI; }
+	}
+	double found_barrel_z = m_track_z0 + dz_by_dphi * dphi_to_barrel;
+	double found_barrel_phi = m_track_phi0 + dphi_to_barrel;
+	double found_barrel_x = x_c + radius * Math.cos(found_barrel_phi);
+	double found_barrel_y = y_c + radius * Math.sin(found_barrel_phi);
+	double found_barrel_polar_r = Math.sqrt(found_barrel_x*found_barrel_x + found_barrel_y*found_barrel_y);
+	double found_barrel_polar_phi = Math.atan2(found_barrel_y, found_barrel_x);
+	boolean validSolution = (found_barrel_z >= barrel_zmin-m_cutSeparation && found_barrel_z <= barrel_zmax+m_cutSeparation);
+	if (validSolution) {
+	    return new BasicHep3Vector(found_barrel_x, found_barrel_y, found_barrel_z);
+	} else {
+	    return null;
+	}
+    }
+
+    protected Hep3Vector extendToEndcapLayer(int layer, Vector<Double> endcap_layering_z, double endcap_rmin, double endcap_rmax, int nsides )
+    {
+	if (!m_barrelValid && !m_endcapValid) { return null; }
+
+	double dphi = (m_track_phi0 - m_track_phi1);
+	while (dphi < -Math.PI) { dphi += 2.0*Math.PI; }
+	while (dphi > Math.PI) { dphi -= 2.0*Math.PI; }
+	double x_c = m_trackParam_xc;
+	double y_c = m_trackParam_yc;
+	double radius = m_trackParam_radius;
+	double dz_by_dphi = m_trackParam_dz_by_dphi;
+
+	double previous_z = m_trackPoint_z;
+	double new_z = Math.abs(endcap_layering_z.get(layer));
+	if (previous_z < 0) { new_z = -new_z; }
+	double dz = new_z - previous_z;
+	double deltaPhi = dz / m_trackParam_dz_by_dphi;
+	double phi = m_trackPoint_phi + deltaPhi;
+	double found_endcap_x = x_c + radius * Math.cos(phi);
+	double found_endcap_y = y_c + radius * Math.sin(phi);
+	double found_endcap_z = new_z;
+	double found_endcap_polar_r = Math.sqrt(found_endcap_x*found_endcap_x + found_endcap_y*found_endcap_y);
+	boolean validSolution = (found_endcap_polar_r >= endcap_rmin-m_cutSeparation && found_endcap_polar_r <= endcap_rmax+m_cutSeparation);
+	if (validSolution) {
+	    return new BasicHep3Vector(found_endcap_x, found_endcap_y, found_endcap_z);
+	} else {
+	    return null;
+	}
+    }
+
+
+}
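
The xy fit in performExtrapolation() above is the standard three-point
circumcenter construction: subtracting the circle equation at hit 0 from the
equations at hits 1 and 2 yields the linear system a_i*xc + b_i*yc = c_i,
which is solved by Cramer's rule. A self-contained restatement of the same
algebra (hypothetical standalone form):

    // Circumcenter of (x0,y0), (x1,y1), (x2,y2).
    static double[] circleCenter(double x0, double y0,
                                 double x1, double y1,
                                 double x2, double y2) {
        double a1 = 2.0*(x1-x0), b1 = 2.0*(y1-y0);
        double a2 = 2.0*(x2-x0), b2 = 2.0*(y2-y0);
        double c1 = x1*x1 + y1*y1 - x0*x0 - y0*y0;
        double c2 = x2*x2 + y2*y2 - x0*x0 - y0*y0;
        double det = a1*b2 - a2*b1; // zero if the three hits are collinear
        double xc = (c1*b2 - c2*b1) / det;
        double yc = (c2*a1 - c1*a2) / det;
        return new double[]{ xc, yc };
    }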

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
MIPChargedParticleMaker.java added at 1.1
diff -N MIPChargedParticleMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MIPChargedParticleMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,346 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import hep.physics.particle.Particle;
+import org.lcsim.event.Track;
+import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.mc.fast.tracking.ReconTrack;
+import org.lcsim.event.base.BaseReconstructedParticle;
+import org.lcsim.recon.ztracking.cheater.CheatTrack;
+import org.lcsim.event.MCParticle;
+import org.lcsim.geometry.Detector;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.recon.cluster.util.ClusterEnergyCalculator;
+import hep.physics.particle.properties.ParticleType;
+import org.lcsim.event.ParticleID;
+import org.lcsim.event.base.BaseParticleID;
+import hep.physics.particle.properties.ParticlePropertyManager;
+import org.lcsim.recon.cluster.util.BasicCluster;
+
+/**
+ * Given a list of MIP clusters and a list of tracks,
+ * try to connect tracks to MIP segments and
+ * make a list of charged ReconstructedParticles.
+ *
+ * Each track is matched to at most one MIP and appears
+ * in at most one ReconstructedParticle, but a MIP (and
+ * hence a particle) may be associated with more than
+ * one track.
+ *
+ * Optionally, the user may supply further lists of clusters.
+ * If the MIP is part of one of these clusters (via Cluster.getClusters),
+ * then the entire cluster is added to the ReconstructedParticle instead.
+ * The parent must be unique.
+ *
+ * @version $Id: MIPChargedParticleMaker.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ */
+
+public class MIPChargedParticleMaker extends Driver
+{
+    /** Simple constructor. */
+    public MIPChargedParticleMaker() {
+	m_clusterLists = new HashMap<String,String>();
+    }
+
+    public void setInputTrackList(String name) { m_inputTrackListName = name; }
+    public void setOutputTrackList(String name){ m_outputTrackListName = name; }
+    public void setInputMIPList(String name){ m_inputMIPListName = name; }
+    public void setOutputMIPList(String name){ m_outputMIPListName = name; }
+    public void setOutputParticleList(String name){ m_outputParticleListName = name; }
+    public void setTrackMatcher(TrackClusterMatcher matcher) { m_matcher = matcher; }
+
+    protected boolean m_checkEoverP = false;
+    /**
+     * Enable/disable a check on E/p. If enabled, a calibration must
+     * also be specified with <code>setCalibration()</code>.
+     */
+    public void setCheckEoverP(boolean check) { m_checkEoverP = check; }
+    /** 
+     * Specify an energy calibration. This is only used for the E/p
+     * check, if enabled. Note that setting a calibration doesn't
+     * automatically enable the E/p check.
+     */
+    public void setCalibration(ClusterEnergyCalculator calib) { m_calib = calib; }
+    protected ClusterEnergyCalculator m_calib = null;
+
+
+    public void addClusterList(String inputName, String outputName) {
+	m_clusterLists.put(inputName, outputName);
+    }
+
+    boolean m_debug = false;
+    public void setDebug(boolean debug) { m_debug = debug; }
+
+    protected EventHeader m_event = null;
+    public void process(EventHeader event)
+    {
+	super.process(event);
+	m_event = event;
+
+	// Inputs:
+	List<Track> inputTrackList = event.get(Track.class, m_inputTrackListName);
+	List<Cluster> inputMIPList = event.get(Cluster.class, m_inputMIPListName);
+
+	// Outputs
+	// Initially, all tracks and MIPs are unmatched.
+	Map<Track,Cluster> matchedTracks = new HashMap<Track,Cluster>();
+	List<Track> unmatchedTracks = new Vector<Track>(inputTrackList);
+	List<Cluster> unmatchedMIPs = new Vector<Cluster>(inputMIPList);
+	List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+
+	// Optional inputs and outputs
+	// Output lists are initially identical to the input lists; we will
+	// remove clusters as they are matched.
+	Map<String,List<Cluster>> inputClusterLists = new HashMap<String,List<Cluster>>();
+	Map<String,List<Cluster>> outputClusterLists = new HashMap<String,List<Cluster>>();
+	for (String inputName : m_clusterLists.keySet()) {
+	    String outputName = m_clusterLists.get(inputName);
+	    List<Cluster> inputList = event.get(Cluster.class, inputName);
+	    List<Cluster> outputList = new Vector<Cluster>(inputList);
+	    inputClusterLists.put(inputName, inputList);
+	    outputClusterLists.put(outputName, outputList);
+	}
+
+	// Try to match each track to a MIP segment
+	Map<Cluster,List<Track>> matchedMIPs = new HashMap<Cluster,List<Track>>();
+	for (Track tr : inputTrackList) {
+	    Cluster matchedMIP = m_matcher.matchTrackToCluster(tr, inputMIPList);
+	    if (matchedMIP != null) {
+		// Verify that the returned MIP is a member of inputMIPList
+		if ( !(inputMIPList.contains(matchedMIP)) ) {
+		    throw new AssertionError("Book-keeping error: MIP Matcher must return a member of the input list or null");
+		}
+		if ( !(matchedMIPs.keySet().contains(matchedMIP)) ) {
+		    // First time we've seen this MIP => make its track list
+		    matchedMIPs.put(matchedMIP, new Vector<Track>());
+		}
+		matchedMIPs.get(matchedMIP).add(tr);
+		unmatchedMIPs.remove(matchedMIP);
+		unmatchedTracks.remove(tr);
+		matchedTracks.put(tr, matchedMIP);
+	    }
+	}
+
+	// Optional: Look for each MIP's parent cluster
+	for (Cluster matchedMIP : matchedMIPs.keySet()) {
+	    Cluster uniqueParent = null;
+	    for (String inputListName : m_clusterLists.keySet()) {
+		String outputListName = m_clusterLists.get(inputListName);
+		List<Cluster> inputList = inputClusterLists.get(inputListName);
+		List<Cluster> outputList = outputClusterLists.get(outputListName);
+		
+		for (Cluster clus : inputList) {
+		    List<Cluster> daughters = recursivelyFindSubClusters(clus);
+		    if (daughters.contains(matchedMIP)) {
+			// Found a parent containing this MIP.
+			if (uniqueParent != null) {
+			    throw new AssertionError("Book-keeping error: Non-unique parent of MIP");
+			}
+			uniqueParent = clus;
+		    }
+		}
+		
+		// If we found a unique parent in this list, do book-keeping:
+		//    1) Remove parent from output list of unmatched clusters
+		//    2) Make associated track(s) point to parent cluster
+		if (uniqueParent != null) {
+		    outputList.remove(uniqueParent);
+		    for (Track tr : matchedMIPs.get(matchedMIP)) {
+			matchedTracks.put(tr, uniqueParent); // overwrite the previous MIP mapping with the parent cluster
+		    }
+		}
+	    }
+	}
+
+	// Now that we have the track:cluster association, make output
+	// particles. We have to watch for the special case where >1 track
+	// is matched to a cluster.
+	Map<Cluster,BaseReconstructedParticle> matchedClusters = new HashMap<Cluster,BaseReconstructedParticle> ();
+	for (Track tr : matchedTracks.keySet()) {
+	    Cluster clus = matchedTracks.get(tr);
+	    if ( ! matchedClusters.keySet().contains(clus)) {
+		// This cluster hasn't been used yet -- initialize its particle
+		BaseReconstructedParticle part = new BaseReconstructedParticle();
+		part.addCluster(clus);
+		matchedClusters.put(clus, part);
+	    }
+	    BaseReconstructedParticle part = matchedClusters.get(clus);
+	    part.addTrack(tr);
+	    recomputeKinematics(part);
+	}
+
+	if (m_checkEoverP) {
+	    for (BaseReconstructedParticle part : matchedClusters.values()) {
+		boolean energyOK = checkEoverP(part);
+		if (energyOK) {
+		    outputParticleList.add(part);
+		} else {
+		    // No match => undo association
+		    // Remove track matching...
+		    List<Track> partTracks = part.getTracks();
+		    unmatchedTracks.addAll(partTracks);
+		    // Remove MIP matching...
+		    List<Cluster> partClusters = part.getClusters();
+		    for (Cluster partCluster : partClusters) {
+			List<Cluster> daughters = recursivelyFindSubClusters(partCluster);
+			for (Cluster dauClus : daughters) {
+			    if (inputMIPList.contains(dauClus)) {
+				unmatchedMIPs.add(dauClus);
+			    }
+			}
+		    }	  
+		    // Remove macro cluster matching...
+		    for (String inputListName : m_clusterLists.keySet()) {
+			String outputListName = m_clusterLists.get(inputListName);
+			List<Cluster> inputList = inputClusterLists.get(inputListName);
+			List<Cluster> outputList = outputClusterLists.get(outputListName);
+			for (Cluster partCluster : partClusters) {
+			    if (inputList.contains(partCluster) && !(outputList.contains(partCluster))) {
+				outputList.add(partCluster);
+			    }
+			}
+		    }
+		}
+	    }
+	} else {
+	    outputParticleList.addAll(matchedClusters.values());
+	}
+
+	// Write out
+	event.put(m_outputTrackListName, unmatchedTracks);
+	event.put(m_outputMIPListName, unmatchedMIPs);
+	event.put(m_outputParticleListName, outputParticleList);
+	for (String inputName : m_clusterLists.keySet()) {
+	    String outputName = m_clusterLists.get(inputName);
+	    List<Cluster> outputList = outputClusterLists.get(outputName);
+	    event.put(outputName, outputList);
+	}
+
+	if (m_debug) {
+	    System.out.println("MIPChargedParticleMaker: Read in "+inputTrackList.size()+" tracks and "+inputMIPList.size()+" MIP clusters; wrote out "
+			       +outputParticleList.size()+" matched particles, "+unmatchedTracks.size()+" unmatched tracks, "+unmatchedMIPs.size()+" unmatched MIP clusters");
+	}
+    }
+
+    /**
+     * Internal utility routine
+     */
+    protected List<Cluster> recursivelyFindSubClusters(Cluster clus) 
+    {
+	List<Cluster> output = new Vector<Cluster>();
+	for (Cluster dau : clus.getClusters()) {
+	    output.addAll(recursivelyFindSubClusters(dau));
+	}
+	output.add(clus);
+	return output;
+    }
+
+    protected TrackClusterMatcher m_matcher;
+    protected String m_inputTrackListName;
+    protected String m_outputTrackListName;
+    protected String m_inputMIPListName;
+    protected String m_outputMIPListName;
+    protected String m_outputParticleListName;
+    protected Map<String,String> m_clusterLists;
+
+    protected boolean checkEoverP(BaseReconstructedParticle part) {
+        // We don't expect an exact match due to resolution, energy lost
+        // to fragments etc., but a good portion of the energy should be
+        // in the cluster
+     
+        // Check energy and uncertainty from calorimeter:
+	BasicCluster macroCluster = new BasicCluster();
+	for (Cluster clus : part.getClusters()) {
+	    macroCluster.addCluster(clus);
+	}
+	double estimatedClusterEnergy = estimateClusterEnergy(macroCluster);
+        double estimatedClusterEnergyUncertainty = 0.7 * Math.sqrt(estimatedClusterEnergy); // 70%/sqrt(E) for hadrons
+        
+        // Check energy from tracks
+	double estimatedTrackEnergy = 0.0;
+	for (Track tr : part.getTracks()) {
+	    double[] trackMomentum = tr.getMomentum();
+	    double trackMomentumMagSq = trackMomentum[0]*trackMomentum[0] + trackMomentum[1]*trackMomentum[1] + trackMomentum[2]*trackMomentum[2];
+	    double trackMomentumMag = Math.sqrt(trackMomentumMagSq);
+	    double massPion   = 0.14;
+	    double energyReleaseIfPion = Math.sqrt(trackMomentumMagSq + massPion*massPion);
+	    estimatedTrackEnergy += energyReleaseIfPion;
+	}
+
+        double allowedVariation = 3.0; // 3sigma
+        boolean energyDiffOK_pion       = Math.abs((estimatedTrackEnergy - estimatedClusterEnergy)/estimatedClusterEnergyUncertainty) < allowedVariation;
+        boolean energyDiffOK = energyDiffOK_pion;
+
+	if (m_debug) {
+	    System.out.println("DEBUG [MIP]: Checked E/P; found cluster E="+estimatedClusterEnergy
+			       +" +- "+estimatedClusterEnergyUncertainty
+			       +" and track E="+estimatedTrackEnergy
+			       +" => "+energyDiffOK);
+	}
+
+        return energyDiffOK;
+    }
+
+    private double estimateClusterEnergy(Cluster clus) {
+	return m_calib.getEnergy(clus);
+    }
+
+    // This particle has >= 1 track.
+    // Recompute its 4-vector and mass.
+    protected void recomputeKinematics(BaseReconstructedParticle part) {
+	if (part.getTracks().size() < 1 ) { throw new AssertionError("Charged particle with no tracks: internal consistency failure."); }
+	double energy = 0.0;
+	int charge = 0;
+	double px = 0.0;
+	double py = 0.0;
+	double pz = 0.0;
+	Particle firstTruthParticle = null;
+	for (Track tr : part.getTracks()) {
+	    double[] trackMomentum = tr.getMomentum();
+	    double trackMomentumMagSq = (trackMomentum[0]*trackMomentum[0] + trackMomentum[1]*trackMomentum[1] + trackMomentum[2]*trackMomentum[2]);
+	    double mass = 0.140;
+	    Particle truthParticle = null;
+	    if (tr instanceof ReconTrack) {
+		truthParticle = ((ReconTrack)(tr)).getMCParticle();
+	    } else if (tr instanceof CheatTrack) {
+		truthParticle = ((CheatTrack)(tr)).getMCParticle();
+	    }
+	    if (truthParticle != null) {
+		mass = truthParticle.getMass();
+		if (firstTruthParticle == null) { firstTruthParticle = truthParticle; }
+	    }
+	    double trackEnergy = Math.sqrt(trackMomentumMagSq + mass*mass);
+	    energy += trackEnergy;
+	    charge += tr.getCharge();
+	    px += trackMomentum[0];
+	    py += trackMomentum[1];
+	    pz += trackMomentum[2];
+	}
+	Hep3Vector threeMomentum = new BasicHep3Vector(px,py,pz);
+	HepLorentzVector fourMomentum = new BasicHepLorentzVector(energy, threeMomentum);
+	part.setCharge(charge);
+	part.set4Vector(fourMomentum);
+	part.setMass(fourMomentum.magnitude());
+	if (part.getTracks().size() == 1) {
+	    // Unique track => PID etc somewhat well-defined
+	    ParticleType type = null;
+	    if (firstTruthParticle != null) {
+		type = firstTruthParticle.getType();
+	    } else {
+		int pdg = 211 * charge; // everything is a pion
+		type = ParticlePropertyManager.getParticlePropertyProvider().get(pdg);
+	    }
+	    BaseParticleID pid = new BaseParticleID(type);
+	    part.addParticleID(pid);
+	    part.setParticleIdUsed(pid);
+	    part.setReferencePoint(new BasicHep3Vector(part.getTracks().get(0).getReferencePoint()));
+	} else {
+	    // Multiple tracks => some quantities not well-defined
+	    // Leave at defaults.
+	}
+    }
+}
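
A minimal configuration sketch for this driver (the list names are illustrative, not taken from this commit; myCalibration stands for some ClusterEnergyCalculator, needed only when the E/p check is enabled):

    MIPChargedParticleMaker maker = new MIPChargedParticleMaker();
    maker.setInputTrackList("Tracks");                    // hypothetical list name
    maker.setInputMIPList("MIPClusters");                 // hypothetical list name
    maker.setOutputTrackList("UnmatchedTracks");
    maker.setOutputMIPList("UnmatchedMIPClusters");
    maker.setOutputParticleList("ChargedMIPParticles");
    maker.setTrackMatcher(new SimpleTrackMIPClusterMatcher());
    // the matcher is itself a Driver and must also see events so its geometry is initialised
    maker.addClusterList("MacroClusters", "UnmatchedMacroClusters"); // optional parent clusters
    maker.setCheckEoverP(true);
    maker.setCalibration(myCalibration);                  // required when the E/p check is on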

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
MultipleTrackTrack.java added at 1.1
diff -N MultipleTrackTrack.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MultipleTrackTrack.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,53 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*; 
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.event.base.*;
+
+/**
+ * Utility class for the case when multiple tracks point to the same
+ * cluster in the calorimeter and can't be disentangled.
+ *
+ * @version $Id: MultipleTrackTrack.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ * @author [log in to unmask]
+ */
+
+public class MultipleTrackTrack extends BaseTrack {
+    protected Collection<Track> m_tracks;
+
+    /** FIXME: Needs to be protected! Public for now while adjusting contrib... */
+    public MultipleTrackTrack(Collection<Track> tracks) {
+	m_tracks = tracks;
+    }
+    public List<Track> getTracks() { return new Vector<Track>(m_tracks); }
+    public int getCharge() {
+	int chargeSum = 0;
+	for (Track tr : m_tracks) {
+	    chargeSum += tr.getCharge();
+	}
+	return chargeSum;
+    }
+    public double[] getMomentum() {
+	double[] mom = new double[3];
+	mom[0] = this.getPX();
+	mom[1] = this.getPY();
+	mom[2] = this.getPZ();
+	return mom;
+    }
+    public double getPX() {
+	double psum = 0.0;
+	for (Track tr : m_tracks) { psum += tr.getPX(); }
+	return psum;
+    }
+    public double getPY() {
+	double psum = 0.0;
+	for (Track tr : m_tracks) { psum += tr.getPY(); }
+	return psum;
+    }
+    public double getPZ() {
+	double psum = 0.0;
+	for (Track tr : m_tracks) { psum += tr.getPZ(); }
+	return psum;
+    }
+}
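
A short usage sketch (trackA and trackB stand for two fitted tracks that point at the same cluster and cannot be disentangled):

    Collection<Track> shared = Arrays.asList(trackA, trackB);
    Track merged = new MultipleTrackTrack(shared);
    double[] p = merged.getMomentum(); // component-wise sum of the two momenta
    int q = merged.getCharge();        // summed charge, e.g. +2, 0 or -2 for two tracks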

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
SequentialTrackClusterMatcher.java added at 1.1
diff -N SequentialTrackClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SequentialTrackClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,44 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import org.lcsim.event.*;
+import org.lcsim.recon.cluster.mipfinder.*;
+
+/**
+  * A utility class for matching tracks to clusters.
+  *
+  * This class doesn't do the matching itself -- instead, it takes other
+  * TrackClusterMatchers and delegates to them, trying to match the
+  * track to a cluster using each of them in sequence.
+  *
+  * @author Mat Charles
+  * @version $Id: SequentialTrackClusterMatcher.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+  */
+
+public class SequentialTrackClusterMatcher implements TrackClusterMatcher {
+    
+    List<TrackClusterMatcher> m_matchers = null;
+
+    /** Simple constructor. */
+    public SequentialTrackClusterMatcher() {
+	m_matchers = new Vector<TrackClusterMatcher>();
+    }
+
+    /** Main interface. */
+    public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters) {
+	for (TrackClusterMatcher matcher : m_matchers) {
+	    Cluster matchedCluster = matcher.matchTrackToCluster(tr, clusters);
+	    if (matchedCluster != null) {
+		// Found a match
+		return matchedCluster;
+	    }
+	}
+	// Nothing found
+	return null;
+    }
+
+    /** Add a TrackClusterMatcher to the end of the list to use. */
+    public void addMatcher(TrackClusterMatcher matcher) {
+	m_matchers.add(matcher);
+    }
+}
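
A usage sketch; the ordering shown (strictest matcher first, looser fall-back second) is a typical choice, not something mandated by the class, and track/clusters stand for objects from the event:

    SequentialTrackClusterMatcher seq = new SequentialTrackClusterMatcher();
    seq.addMatcher(new SimpleTrackMIPClusterMatcher()); // tried first
    seq.addMatcher(new SimpleTrackClusterMatcher());    // tried only if the first returns null
    Cluster match = seq.matchTrackToCluster(track, clusters); // null if neither matcher succeeds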

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
SimpleChargedParticleMaker.java added at 1.1
diff -N SimpleChargedParticleMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SimpleChargedParticleMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,165 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import hep.physics.particle.Particle;
+
+import org.lcsim.event.Track;
+import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
+import org.lcsim.geometry.Detector;
+import org.lcsim.recon.cluster.util.BasicCluster;
+import org.lcsim.recon.cluster.util.TensorClusterPropertyCalculator;
+import org.lcsim.mc.fast.tracking.ReconTrack;
+import org.lcsim.event.base.BaseReconstructedParticle;
+import org.lcsim.recon.ztracking.cheater.CheatTrack;
+import hep.physics.particle.properties.ParticleType;
+import org.lcsim.event.ParticleID;
+import org.lcsim.event.base.BaseParticleID;
+import hep.physics.particle.properties.ParticlePropertyManager;
+
+/**
+ * Given lists of clusters and tracks, make a list of charged
+ * ReconstructedParticles.
+ *
+ * Currently, PID is done by cheating.
+ *
+ * @version $Id: SimpleChargedParticleMaker.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ */
+
+public class SimpleChargedParticleMaker extends Driver
+{
+    /** Simple constructor. */
+    public SimpleChargedParticleMaker() {
+    }
+
+    public void setInputTrackList(String name) { m_inputTrackListName = name; }
+    public void setOutputTrackList(String name){ m_outputTrackListName = name; }
+    public void setInputClusterList(String name){ m_inputClusterListName = name; }
+    public void setOutputParticleList(String name){ m_outputParticleListName = name; }
+    public void setTrackMatcher(TrackClusterMatcher matcher) { m_matcher = matcher; }
+
+    public void process(EventHeader event)
+    {
+	super.process(event);
+
+	// Inputs:
+	List<Track> inputTrackList = event.get(Track.class, m_inputTrackListName);
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+	
+	// Outputs:
+	Map<Track,Cluster> matchedTracks = new HashMap<Track,Cluster>();
+	List<Track> unmatchedTracks = new Vector<Track>(inputTrackList);
+	List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+
+	// Try to match each track to a cluster
+	for (Track tr : inputTrackList) {
+	    Cluster matchedCluster = m_matcher.matchTrackToCluster(tr, inputClusterList);
+	    if (matchedCluster != null) {
+		matchedTracks.put(tr, matchedCluster);
+		unmatchedTracks.remove(tr);
+		// Should we check that the matched cluster is in the
+		// expected list?
+	    }
+	}
+
+	// Now, we need to make output particles for the tracks.
+	// We have to watch out for the special cases where:
+	//    >1 track is matched to a cluster
+	Map<Cluster,BaseReconstructedParticle> matchedClusters = new HashMap<Cluster,BaseReconstructedParticle> ();
+	for (Track tr : matchedTracks.keySet()) {
+	    Cluster clus = matchedTracks.get(tr);
+	    if ( ! matchedClusters.keySet().contains(clus)) {
+		// This cluster hasn't been used yet -- initialize its particle
+		BaseReconstructedParticle part = new BaseReconstructedParticle();
+		part.addCluster(clus);
+		matchedClusters.put(clus, part);
+	    }
+	    // Now update the particle for the track:
+	    BaseReconstructedParticle part = matchedClusters.get(clus);
+	    part.addTrack(tr);
+	    recomputeKinematics(part);
+	}	
+	outputParticleList.addAll(matchedClusters.values());
+
+	// Write out
+	event.put(m_outputTrackListName, unmatchedTracks);
+	event.put(m_outputParticleListName, outputParticleList);
+
+	if (m_debug) {
+	    System.out.println("SimpleChargedParticleMaker: Read in "+inputTrackList.size()+" tracks and "+inputClusterList.size()+" clusters; wrote out "
+			       +outputParticleList.size()+" matched particles and "+unmatchedTracks.size()+" unmatched tracks");
+	}
+    }
+
+    boolean m_debug = false;
+    public void setDebug(boolean debug) { m_debug = debug; }
+
+    protected TrackClusterMatcher m_matcher;
+    String m_inputTrackListName;
+    String m_outputTrackListName;
+    String m_inputClusterListName;
+    String m_outputParticleListName;
+
+    // This particle has >= 1 track.
+    // Recompute its 4-vector and mass.
+    protected void recomputeKinematics(BaseReconstructedParticle part) {
+	if (part.getTracks().size() < 1 ) { throw new AssertionError("Charged particle with no tracks: internal consistency failure."); }
+	double energy = 0.0;
+	int charge = 0;
+	double px = 0.0;
+	double py = 0.0;
+	double pz = 0.0;
+	Particle firstTruthParticle = null;
+	for (Track tr : part.getTracks()) {
+	    double[] trackMomentum = tr.getMomentum();
+	    double trackMomentumMagSq = (trackMomentum[0]*trackMomentum[0] + trackMomentum[1]*trackMomentum[1] + trackMomentum[2]*trackMomentum[2]);
+	    double mass = 0.140;
+	    Particle truthParticle = null;
+	    if (tr instanceof ReconTrack) {
+		truthParticle = ((ReconTrack)(tr)).getMCParticle();
+	    } else if (tr instanceof CheatTrack) {
+		truthParticle = ((CheatTrack)(tr)).getMCParticle();
+	    }
+	    if (truthParticle != null) {
+		mass = truthParticle.getMass();
+		if (firstTruthParticle == null) { firstTruthParticle = truthParticle; }
+	    }
+	    double trackEnergy = Math.sqrt(trackMomentumMagSq + mass*mass);
+	    energy += trackEnergy;
+	    charge += tr.getCharge();
+	    px += trackMomentum[0];
+	    py += trackMomentum[1];
+	    pz += trackMomentum[2];
+	}
+	Hep3Vector threeMomentum = new BasicHep3Vector(px,py,pz);
+	HepLorentzVector fourMomentum = new BasicHepLorentzVector(energy, threeMomentum);
+	part.setCharge(charge);
+	part.set4Vector(fourMomentum);
+	part.setMass(fourMomentum.magnitude());
+	if (part.getTracks().size() == 1) {
+	    // Unique track => PID etc somewhat well-defined
+	    ParticleType type = null;
+	    if (firstTruthParticle != null) {
+		type = firstTruthParticle.getType();
+	    } else {
+		int pdg = 211 * charge; // everything is a pion
+		type = ParticlePropertyManager.getParticlePropertyProvider().get(pdg);
+	    }
+	    BaseParticleID pid = new BaseParticleID(type);
+	    part.addParticleID(pid);
+	    part.setParticleIdUsed(pid);
+	    part.setReferencePoint(new BasicHep3Vector(part.getTracks().get(0).getReferencePoint()));
+	} else {
+	    // Multiple tracks => some quantities not well-defined
+	    // Leave at defaults.
+	}
+    }	
+}
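
A worked numerical check of the kinematics in recomputeKinematics(), using the pion-mass default that applies when a track has no truth match:

    double[] p = {3.0, 4.0, 0.0};                  // GeV, so |p| = 5
    double p2 = p[0]*p[0] + p[1]*p[1] + p[2]*p[2]; // 25.0
    double mass = 0.140;                           // default pion mass, GeV
    double energy = Math.sqrt(p2 + mass*mass);     // ~5.002 GeV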
+
+

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
SimpleNeutralParticleMaker.java added at 1.1
diff -N SimpleNeutralParticleMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SimpleNeutralParticleMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,121 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.Hep3Vector;
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.VecOp;
+import hep.physics.vec.BasicHepLorentzVector;
+import hep.physics.vec.HepLorentzVector;
+import hep.physics.particle.properties.ParticlePropertyProvider;
+import hep.physics.particle.properties.ParticlePropertyManager;
+import hep.physics.particle.properties.ParticleType;
+import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.event.base.BaseReconstructedParticle;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.ParticleID;
+import org.lcsim.event.base.BaseParticleID;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.base.BaseReconstructedParticle;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.event.MCParticle;
+import org.lcsim.recon.cluster.util.ClusterEnergyCalculator;
+
+/**
+ * Given a list of clusters, make a list of neutral ReconstructedParticles.
+ *
+ * Currently, PID is predetermined.
+ *
+ * @version $Id: SimpleNeutralParticleMaker.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ */
+
+public class SimpleNeutralParticleMaker extends Driver
+{
+    protected EventHeader m_event;
+
+    /**
+     * Constructor.
+     *
+     * @param pdg The neutral particles are assumed to be of this type when calculating the 4-momentum.
+     */
+    public SimpleNeutralParticleMaker(int pdg) {
+	setParticleID(pdg);
+    }
+
+    // Configure
+    public void setInputClusterList(String name){ m_inputClusterListName = name; }
+    public void setOutputParticleList(String name){ m_outputParticleListName = name; }
+    public void setParticleID(int pdg) { 
+	ParticlePropertyProvider mgr = ParticlePropertyManager.getParticlePropertyProvider();
+	ParticleType type = mgr.get(pdg);
+	m_mass = type.getMass();
+	m_id = new BaseParticleID(type);
+    }
+
+    // Process one event
+    public void process(EventHeader event) 
+    {
+	m_event = event;
+
+	// Input, output:
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+	List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+
+	for (Cluster clus : inputClusterList) {
+	    // Create the particle
+	    BaseReconstructedParticle part = new BaseReconstructedParticle();
+	    part.addCluster(clus);
+	    // Treat the calibrated cluster energy as the total energy E; at a crude level it includes the particle mass.
+	    double clusterEnergy = estimateClusterEnergy(clus);
+	    Hep3Vector threeMomentum = computeMomentum(clusterEnergy, clus);
+	    // Set the other particle properties that are needed to render
+	    // properly in the event display.
+	    HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
+	    part.set4Vector(fourMomentum);
+	    part.setReferencePoint(0,0,0);
+	    part.setCharge(0);
+	    // Set the PID and mass
+	    part.addParticleID(m_id);
+	    part.setParticleIdUsed(m_id);
+	    part.setMass(m_mass);
+	    // Add to the output list
+	    outputParticleList.add(part);
+	}
+
+	event.put(m_outputParticleListName, outputParticleList);
+    }
+
+    Hep3Vector computeMomentum(double energy, Cluster clus) {
+	// Where is the cluster?
+	Hep3Vector pos = new BasicHep3Vector(clus.getPosition());
+	Hep3Vector unitDirection = VecOp.unit(pos);
+	// Now, what's the momentum?
+	// p^2 = E^2 - m^2
+	double momentumSquared = energy*energy - m_mass*m_mass;
+	if (momentumSquared < 0) {
+	    // A low-energy cluster that we think came from a
+	    // massive particle -- it must be very soft.
+	    // Treat as zero momentum.
+	    momentumSquared = 0;
+	}
+	double momentumMagnitude = Math.sqrt(momentumSquared);
+	Hep3Vector momentum = VecOp.mult(momentumMagnitude, unitDirection);
+	return momentum;
+    }
+
+    protected String m_inputClusterListName;
+    protected String m_outputParticleListName;
+    protected double m_mass;
+    protected ParticleID m_id = null;
+
+    protected ClusterEnergyCalculator m_calib = null;
+    protected double estimateClusterEnergy(Cluster clus) {
+	return m_calib.getEnergy(clus);
+    }
+    /** Specify what energy calibration to use. */
+    public void setCalibration(ClusterEnergyCalculator calib) { m_calib = calib; }
+
+    boolean m_debug = false;
+    public void setDebug(boolean debug) { m_debug = debug; }
+}
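
A worked sketch of computeMomentum() for a hypothetical 2.0 GeV cluster reconstructed as a neutron (pdg 2112, m ~ 0.9396 GeV); the direction is taken from the origin through the cluster position:

    double E = 2.0, m = 0.9396;                    // GeV
    double pMag = Math.sqrt(E*E - m*m);            // ~1.766 GeV; clamped to 0 if E < m
    Hep3Vector dir = VecOp.unit(new BasicHep3Vector(0.0, 1500.0, 0.0)); // cluster at y = 1.5 m
    Hep3Vector mom = VecOp.mult(pMag, dir);        // (0, 1.766, 0) GeV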

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
SimpleTrackClusterMatcher.java added at 1.1
diff -N SimpleTrackClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SimpleTrackClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,478 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+
+import org.lcsim.util.decision.*;
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
+import org.lcsim.geometry.Detector;
+import org.lcsim.recon.cluster.util.BasicCluster;
+import org.lcsim.recon.cluster.util.TensorClusterPropertyCalculator;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.Track;
+import org.lcsim.util.Driver;
+import org.lcsim.event.MCParticle;
+import hep.physics.particle.Particle;
+import org.lcsim.mc.fast.tracking.ReconTrack;
+import org.lcsim.event.SimCalorimeterHit;
+import org.lcsim.recon.cluster.util.ClusterEnergyCalculator;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Subdetector;
+
+/**
+ * Attempt to match a Track to a Cluster, based on the intercept point
+ * on the ECAL inner surface.
+ *
+ * Currently, the match criteria are hard-code. It would be better to
+ * supply them as DecisionMaker objects.
+ *
+ * @version $Id: SimpleTrackClusterMatcher.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $ 
+ */
+
+public class SimpleTrackClusterMatcher extends Driver implements TrackClusterMatcher
+{
+    protected CalorimeterInformation ci;
+    protected Subdetector emb;
+    protected Subdetector eme;
+    public SimpleTrackClusterMatcher(double cut) {
+	m_separationCut = cut;
+    }
+    public SimpleTrackClusterMatcher() {
+	// Use default cuts on track-cluster matching:
+	//   * Subdet must be EMBarrel or EMEndcap
+	//   * First layer of cluster in ECAL must be <5
+	//   * Track intercept point must be within 30 mm of a cluster hit
+	// For now, E/p cut is handled specially
+    }
+
+    protected HelixSwimmer createSwimmer(Track tr) {
+	HelixSwimmer swimmer = new HelixSwimmer(m_fieldStrength[2]);
+	swimmer.setTrack(tr);
+	return swimmer;
+    }
+
+    /**
+      * Match this track to a cluster from the list supplied.
+      */
+    public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters)
+    {
+	if (m_debug) {
+	    System.out.println("DEBUG: SimpleTrackClusterMatcher.matchTrackToCluster invoked for a list of "+clusters.size()+" clusters.");
+	    double[] trackMom = tr.getMomentum();
+	    double trackMomMag = Math.sqrt(trackMom[0]*trackMom[0] + trackMom[1]*trackMom[1] + trackMom[2]*trackMom[2]);
+	    System.out.println("DEBUG: Track has momentum "+trackMomMag+" --> ("+trackMom[0]+", "+trackMom[1]+", "+trackMom[2]+")");
+	    System.out.println("DEBUG: Track has reference point ("+tr.getReferencePointX()+", "+tr.getReferencePointY()+", "+tr.getReferencePointZ()+")");
+	    System.out.println("DEBUG: Track parameters are:"
+			       +" d0="+tr.getTrackParameter(0) 
+			       +", phi0="+tr.getTrackParameter(1) 
+			       +", omega="+tr.getTrackParameter(2) 
+			       +", z0="+tr.getTrackParameter(3) 
+			       +", s="+tr.getTrackParameter(4) 
+			       );
+	}
+	
+	// Make a HelixSwimmer to propagate the track
+	HelixSwimmer swimmer = createSwimmer(tr);
+
+	// Try swimming to the barrel:
+	double  alphaBarrel = swimToBarrel(swimmer);
+	boolean validBarrel = false;
+	// Try swimming to the endcap:
+	double  alphaEndcap = swimToEndcap(swimmer);
+	boolean validEndcap = false;
+
+	// Fixme: Here we should check that the track really does go all the
+	// way to the ECAL instead of stopping/decaying/interacting earlier.
+	double alpha = Double.NaN; 
+	if (isValidBarrelIntercept(swimmer, alphaBarrel)) {
+	    alpha = alphaBarrel;
+	    validBarrel = true;
+	} else if (isValidEndcapIntercept(swimmer, alphaEndcap)) {
+	    alpha = alphaEndcap;
+	    validEndcap = true;
+	}
+
+	// Did we make a successful extrapolation?
+	if (Double.isNaN(alpha)) {
+	    // No -- failed
+	    if (m_debug) { 
+		System.out.println("DEBUG: "+this.getClass().getName()+": Failed to extrapolate: alphaBarrel="+alphaBarrel+" ("+validBarrel+"), alphaEndcap="+alphaEndcap+" ("+validEndcap+") => alpha="+alpha);
+		double[] trackMom = tr.getMomentum();
+		double trackMomMag = Math.sqrt(trackMom[0]*trackMom[0] + trackMom[1]*trackMom[1] + trackMom[2]*trackMom[2]);
+		if (tr instanceof org.lcsim.mc.fast.tracking.ReconTrack) {
+		    Particle debugTruthMatch = ((ReconTrack)(tr)).getMCParticle();
+		    if (debugTruthMatch != null) {
+			System.out.println("DEBUG: "+this.getClass().getName()+": No track match for track with p="+trackMomMag+" from truth "+debugTruthMatch.getType().getName()+" with E="+debugTruthMatch.getEnergy()+" because extrapolation failed.");
+		    } else {
+			System.out.println("DEBUG: "+this.getClass().getName()+": No track match for track with p="+trackMomMag+" [no truth match] because extrapolation failed.");
+		    }
+		}
+	    }
+	    return null;
+	}
+	if ( !(validEndcap || validBarrel) ) {
+	    // Invalid state
+	    throw new AssertionError("Invalid state: alpha is not NaN, but not a valid barrel or endcap intercept");
+	}
+	if ( validEndcap && validBarrel ) {
+	    throw new AssertionError("Invalid state: barrel="+validBarrel+", endcap="+validEndcap);
+	}
+
+	// If we reach here, we extrapolated to the barrel or
+	// to the endcap successfully.
+	
+	Cluster matchedCluster = findMatchedCluster(tr, swimmer, alpha, clusters);
+	if (matchedCluster != null) {
+	    // Matched OK
+	    if (m_debug) { 
+		Hep3Vector momentumVec = new BasicHep3Vector(tr.getMomentum());
+		System.out.println("DEBUG: "+this.getClass().getName()+": Extrapolated track to cluster (momentum = "+momentumVec.magnitude()+")"); 
+		double[] trackMom = tr.getMomentum();
+		double trackMomMag = Math.sqrt(trackMom[0]*trackMom[0] + trackMom[1]*trackMom[1] + trackMom[2]*trackMom[2]);
+		if (tr instanceof org.lcsim.mc.fast.tracking.ReconTrack) {
+		    Particle debugTruthMatch = ((ReconTrack)(tr)).getMCParticle();
+		    if (debugTruthMatch != null) {
+			System.out.println("DEBUG: "+this.getClass().getName()+": Track match for track with p="+trackMomMag+" from truth "+debugTruthMatch.getType().getName()+" with E="+debugTruthMatch.getEnergy());
+		    } else {
+			System.out.println("DEBUG: "+this.getClass().getName()+": Track match for track with p="+trackMomMag+" [no truth match]");
+		    }
+		}
+	    }
+	    return matchedCluster;
+	} else {
+	    // No match found
+	    if (m_debug) { 
+		Hep3Vector momentumVec = new BasicHep3Vector(tr.getMomentum());
+		System.out.println("DEBUG: "+this.getClass().getName()+": Failed to extrapolate track (momentum = "+momentumVec.magnitude()+")"); 
+		double[] trackMom = tr.getMomentum();
+		double trackMomMag = Math.sqrt(trackMom[0]*trackMom[0] + trackMom[1]*trackMom[1] + trackMom[2]*trackMom[2]);
+		if (tr instanceof org.lcsim.mc.fast.tracking.ReconTrack) {
+		    Particle debugTruthMatch = ((ReconTrack)(tr)).getMCParticle();
+		    if (debugTruthMatch != null) {
+			System.out.println("DEBUG: "+this.getClass().getName()+": No track match for track with p="+trackMomMag+" from truth "+debugTruthMatch.getType().getName()+" with E="+debugTruthMatch.getEnergy());
+		    } else {
+			System.out.println("DEBUG: "+this.getClass().getName()+": No track match for track with p="+trackMomMag+" [no truth match]");
+		    }
+		}
+	    }
+
+	    return null;
+	}
+    }
+    
+    protected boolean m_checkEoverP = false;
+    public void setCheckEoverP(boolean checkEoverP) {
+	m_checkEoverP = checkEoverP;
+	if (m_debug) { System.out.println("DEBUG: set m_checkEoverP to "+m_checkEoverP); }
+    }
+
+    protected double swimToBarrel(HelixSwimmer swimmer) {
+	// Look for a hit in the first layer of the ECAL barrel
+	return swimmer.getDistanceToRadius(m_ECAL_barrel_r);
+    }
+    protected double swimToEndcap(HelixSwimmer swimmer) {
+	// Look for a hit in the first layer of the ECAL endcap
+	double distanceToEndcap1 = swimmer.getDistanceToZ(m_ECAL_endcap_z);
+	double distanceToEndcap2 = swimmer.getDistanceToZ(-m_ECAL_endcap_z);
+	if (distanceToEndcap1>0) {
+	    return distanceToEndcap1;
+	} else {
+	    return distanceToEndcap2;
+	}
+    }
+    protected boolean isValidBarrelIntercept(HelixSwimmer swimmer, double alpha) {
+	// Must have -m_ECAL_barrel_z <= z <= +m_ECAL_barrel_z (within errors)
+	double uncertainty = m_separationCut;
+	Hep3Vector intercept = swimmer.getPointAtDistance(alpha);
+	double z = intercept.z();
+	boolean zInRange = (z >= m_ECAL_barrel_zmin-uncertainty && z <= m_ECAL_barrel_zmax+uncertainty);
+	return zInRange;
+    }
+    protected boolean isValidEndcapIntercept(HelixSwimmer swimmer, double alpha) {
+	// Must have m_ECAL_endcap_rmin <= r <= m_ECAL_endcap_rmax (within errors)
+	double uncertainty = m_separationCut;
+	Hep3Vector intercept = swimmer.getPointAtDistance(alpha);
+	double r = Math.sqrt(intercept.x()*intercept.x() + intercept.y()*intercept.y());
+	boolean rInRange = (r >= m_ECAL_endcap_rmin-uncertainty && r <= m_ECAL_endcap_rmax+uncertainty);
+	return rInRange;
+    }
+
+    protected Cluster findMatchedCluster(Track tr, HelixSwimmer swimmer, double alpha, List<Cluster> clusters) 
+    {
+	if (m_debug) { System.out.println("DEBUG: SimpleTrackClusterMatched.findMatchedCluster() invoked for a list of "+clusters.size()+" clusters."); }
+	// Find the track intercept and direction
+	swimmer.setTrack(tr);
+	Hep3Vector trackPoint = swimmer.getPointAtDistance(alpha);
+	
+	List<Cluster> nearestClusters = findNearestClusters(trackPoint, clusters);
+	for (Cluster nearbyCluster : nearestClusters) {
+	    // Obtain geometrical info:
+	    CalorimeterHit nearestHit = findNearestHit(trackPoint, nearbyCluster);
+	    double separation = proximity(trackPoint, nearestHit);
+	    CalorimeterHit firstHitInECAL = findInnermostHitInECAL(nearbyCluster);
+	    org.lcsim.geometry.Subdetector subdet = nearestHit.getSubdetector();
+	    // Make cuts:
+	    boolean goodSubDet = (subdet == emb) || (subdet == eme);
+	    boolean goodFirstLayer = (firstHitInECAL!=null && getVLayer(firstHitInECAL) < 5);
+	    double separationCut = m_separationCut;
+	    boolean goodSeparation = (separation < separationCut);
+	    boolean foundMatch = goodSubDet && goodFirstLayer && goodSeparation;
+	    if (m_debug) { 
+		String printme = new String();
+		printme += "Debug: Match track to cluster = ["+foundMatch+"] since";
+		printme += " subdet="+subdet.getName()+" ["+goodSubDet+"] and";
+		if (firstHitInECAL!=null) {
+		    printme += " firstlayer="+getVLayer(firstHitInECAL)+" ["+goodFirstLayer+"] and";
+		} else {
+		    printme += " firstlayer=null ["+goodFirstLayer+"] and";
+		}
+		printme += " separation="+separation+" ["+goodSeparation+"]";
+		System.out.println(printme);
+		String debugContributions = new String();
+		debugContributions += "DEBUG: Cluster contents:";
+		Map<MCParticle, List<CalorimeterHit>> tmpMap = new HashMap<MCParticle, List<CalorimeterHit>>();
+		for (CalorimeterHit hit : nearbyCluster.getCalorimeterHits()) {
+		    SimCalorimeterHit simhit = (SimCalorimeterHit) (hit);
+		    for (int i=0; i<simhit.getMCParticleCount(); i++) {
+			MCParticle hitPart = simhit.getMCParticle(i);
+			if ( ! (tmpMap.keySet().contains(hitPart)) ) {
+			    tmpMap.put(hitPart, new Vector<CalorimeterHit>());
+			}
+			tmpMap.get(hitPart).add(hit);
+		    }
+		}
+		for (MCParticle hitPart : tmpMap.keySet()) {
+		    debugContributions += " ";
+		    debugContributions += hitPart.getType().getName();
+		    debugContributions += " (E=";
+		    debugContributions += hitPart.getEnergy();
+		    debugContributions += ", hits=";
+		    debugContributions += tmpMap.get(hitPart).size();
+		    debugContributions += ")";
+		}
+		System.out.println(debugContributions);
+	    }
+	    if (foundMatch) {
+		// Geometrically, it looks good.
+		// Is it sensible in terms of energy?
+	
+		boolean energyOK = true; // By default, always pass
+		if (m_checkEoverP) {
+		    boolean passesEoverPcut = checkEoverP(nearbyCluster, tr);
+		    energyOK = energyOK && passesEoverPcut;
+		}
+		
+		if (energyOK) {
+		    // Matches OK
+		    return nearbyCluster;
+		} else {
+		    // This cluster isn't a good match -- ignore it.
+		}
+	    }
+	}
+	// No match
+	return null;
+    }
+
+    protected boolean checkEoverP(Cluster nearbyCluster, Track tr) {
+	// We don't expect an exact match due to resolution, energy lost
+	// to fragments etc., but a good portion of the energy should be
+	// in the cluster
+	
+	// Check energy and uncertainty from calorimeter:
+	double estimatedClusterEnergy = estimateClusterEnergy(nearbyCluster);
+	double estimatedClusterEnergyUncertainty = 0.7 * Math.sqrt(estimatedClusterEnergy); // 70%/sqrt(E) for hadrons
+	
+	// Check energy from track
+	double[] trackMomentum = tr.getMomentum();
+	double trackMomentumMagSq = trackMomentum[0]*trackMomentum[0] + trackMomentum[1]*trackMomentum[1] + trackMomentum[2]*trackMomentum[2];
+	double trackMomentumMag = Math.sqrt(trackMomentumMagSq);
+	double massPion   = 0.14;
+	double massProton = 0.94;
+	double energyReleaseIfPion = Math.sqrt(trackMomentumMagSq + massPion*massPion);
+	double energyReleaseIfProton = trackMomentumMag; // kinetic energy only (~ p for p >> m_p)
+	double energyReleaseIfAntiproton = trackMomentumMag + massProton + massProton; // kinetic energy (~ p) plus 2 m_p released in annihilation
+	
+	double allowedVariation = 3.0; // 3sigma
+	boolean energyDiffOK_pion       = Math.abs((energyReleaseIfPion - estimatedClusterEnergy)/estimatedClusterEnergyUncertainty) < allowedVariation;
+	boolean energyDiffOK_proton     = Math.abs((energyReleaseIfProton - estimatedClusterEnergy)/estimatedClusterEnergyUncertainty) < allowedVariation;
+	boolean energyDiffOK_antiproton = Math.abs((energyReleaseIfAntiproton - estimatedClusterEnergy)/estimatedClusterEnergyUncertainty) < allowedVariation;
+	boolean energyDiffOK = energyDiffOK_pion || energyDiffOK_proton || energyDiffOK_antiproton;
+	
+	//boolean fractionEnergyOK = estimatedClusterEnergy > 0.5*trackMomentumMag; // don't use
+	//boolean absoluteEnergyOK = (trackMomentumMag - estimatedClusterEnergy < 0.2); // deliberately one-sided (old... is this right?)
+
+	if (m_debug) {
+	    System.out.println("DEBUG [gen]: Checked E/P; found cluster E="+estimatedClusterEnergy
+			       +" +- "+estimatedClusterEnergyUncertainty
+			       +" and track E="+energyReleaseIfPion+" ("+energyReleaseIfProton+"/"+energyReleaseIfAntiproton+")"
+			       +" => "+energyDiffOK);
+	    System.out.println("DEBUG: Comparing track with momentum "+trackMomentumMag+" to a cluster with estimated energy "+estimatedClusterEnergy+" +- "+estimatedClusterEnergyUncertainty);
+	    System.out.println("   If pi+:  |"+energyReleaseIfPion+" - "+estimatedClusterEnergy+"|/"+estimatedClusterEnergyUncertainty+" = "+Math.abs((energyReleaseIfPion - estimatedClusterEnergy)/estimatedClusterEnergyUncertainty)+" => "+energyDiffOK_pion);
+	    System.out.println("   If p:    |"+energyReleaseIfProton+" - "+estimatedClusterEnergy+"|/"+estimatedClusterEnergyUncertainty+" = "+Math.abs((energyReleaseIfProton - estimatedClusterEnergy)/estimatedClusterEnergyUncertainty)+" => "+energyDiffOK_proton);
+	    System.out.println("   If pbar: |"+energyReleaseIfAntiproton+" - "+estimatedClusterEnergy+"|/"+estimatedClusterEnergyUncertainty+" = "+Math.abs((energyReleaseIfAntiproton - estimatedClusterEnergy)/estimatedClusterEnergyUncertainty)+" => "+energyDiffOK_antiproton);
+	    System.out.println("   => Overall: "+energyDiffOK);
+	}		    
+	
+	return energyDiffOK;
+    }
+
+    protected List<Cluster> findNearestClusters(Hep3Vector point, List<Cluster> clusterList)
+    {
+	Map<Cluster,Double> mapClusterToDistance = new HashMap<Cluster, Double>();
+	List<Cluster> sortedListOfClusters = new Vector<Cluster>();
+	for (Cluster clus : clusterList) {
+	    double dist = proximity(point, clus);
+	    mapClusterToDistance.put(clus, new Double(dist));
+	    sortedListOfClusters.add(clus);
+	}
+	Comparator<Cluster> comp = new CompareMapping<Cluster>(mapClusterToDistance);
+	Collections.sort(sortedListOfClusters, comp);
+	return sortedListOfClusters;
+    }
+    protected CalorimeterHit findNearestHit(Hep3Vector point, Cluster clus) 
+    {
+	CalorimeterHit nearest = null;
+	double minDist = 0;
+	for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+	    Hep3Vector hitPosition = new BasicHep3Vector(hit.getPosition());
+	    double distance = VecOp.sub(hitPosition, point).magnitude();
+	    if (distance<minDist || nearest==null) {
+		nearest = hit;
+		minDist = distance;
+	    }
+	}
+	return nearest;
+    }
+
+    protected CalorimeterHit findInnermostHitInECAL(Cluster clus) {
+	CalorimeterHit innermostHit = null;
+	for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+	    int layer = getVLayer(hit);
+	    org.lcsim.geometry.Subdetector subdet = hit.getSubdetector();	    
+            if ( ! subdet.isCalorimeter() ) { throw new AssertionError("Cluster hit outside calorimeter"); }
+            if (subdet == emb || subdet == eme) {
+                // EM -- OK
+		if (innermostHit==null || getVLayer(innermostHit)>layer) {
+		    innermostHit = hit;
+		}
+	    }
+	}
+	return innermostHit;
+    }
+
+    protected double proximity(Hep3Vector point, Cluster clus) {
+	CalorimeterHit nearestHit = findNearestHit(point, clus);
+	return proximity(point, nearestHit);
+    }
+    protected double proximity(Hep3Vector point, CalorimeterHit hit) {
+	Hep3Vector hitPosition = new BasicHep3Vector(hit.getPosition());
+	double distance = VecOp.sub(hitPosition, point).magnitude();
+	return distance;
+    }
+
+    protected double findUnitDotProduct(Hep3Vector tangent, Cluster clus) 
+    {
+	// Find the cluster direction
+	BasicCluster copy = new BasicCluster();
+	copy.addCluster(clus);
+	TensorClusterPropertyCalculator calc = new TensorClusterPropertyCalculator();
+	copy.setPropertyCalculator(calc);
+	copy.calculateProperties();
+	double[][]axes = calc.getPrincipleAxis();
+	Hep3Vector clusterDir = new BasicHep3Vector(axes[0][0], axes[0][1], axes[0][2]);
+	// Get the dot product:
+	double unitDotProduct = VecOp.dot(tangent, clusterDir) / (tangent.magnitude() * clusterDir.magnitude());
+	return unitDotProduct;
+    }
+    protected int getVLayer(CalorimeterHit hit) {
+	org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
+	id.setID(hit.getCellID());
+	int layer = id.getVLayer();
+	return layer;
+    }
+
+    protected EventHeader m_event;
+    public void process(EventHeader event) {
+	m_event = event;
+	initGeometry(event);
+    }
+
+    public void setDebug(boolean debug) {
+	m_debug = debug;
+    }
+
+    public void initGeometry(EventHeader event) 
+    {
+        if(!m_init)
+        {
+            if(ci == null)
+            {
+                ci = CalorimeterInformation.instance();
+                emb = ci.getSubdetector(CalorimeterType.EM_BARREL);
+                eme = ci.getSubdetector(CalorimeterType.EM_ENDCAP);
+            }
+            m_ECAL_barrel_zmin = ci.getZMin(CalorimeterType.EM_BARREL);
+            m_ECAL_barrel_zmax = ci.getZMax(CalorimeterType.EM_BARREL);
+            m_ECAL_barrel_r = emb.getLayering().getDistanceToLayerSensorMid(0);
+            m_ECAL_endcap_z = eme.getLayering().getDistanceToLayerSensorMid(0);
+            m_ECAL_endcap_rmin = ci.getRMin(CalorimeterType.EM_ENDCAP);
+            m_ECAL_endcap_rmax = ci.getRMax(CalorimeterType.EM_ENDCAP);
+            double[] zero = {0, 0, 0};
+            m_fieldStrength = event.getDetector().getFieldMap().getField(zero);
+            m_init = true;
+            if (m_debug) {
+                System.out.println(this.getClass().getName()+": Init: ECAL barrel zmin="+m_ECAL_barrel_zmin);
+                System.out.println(this.getClass().getName()+": Init: ECAL barrel zmax="+m_ECAL_barrel_zmax);
+                System.out.println(this.getClass().getName()+": Init: ECAL barrel r="+m_ECAL_barrel_r);
+                System.out.println(this.getClass().getName()+": Init: ECAL endcap z="+m_ECAL_endcap_z);
+                System.out.println(this.getClass().getName()+": Init: ECAL endcap rmin="+m_ECAL_endcap_rmin);
+                System.out.println(this.getClass().getName()+": Init: ECAL endcap rmax="+m_ECAL_endcap_rmax);
+            }
+        }
+    }
+
+    protected boolean m_init = false;
+    protected double m_ECAL_barrel_zmin;
+    protected double m_ECAL_barrel_zmax;
+    protected double m_ECAL_barrel_r;
+    protected double m_ECAL_endcap_z;
+    protected double m_ECAL_endcap_rmin;
+    protected double m_ECAL_endcap_rmax;
+    protected boolean m_debug = false;
+    protected double[] m_fieldStrength;
+
+    private class CompareMapping<T> implements Comparator<T> {
+	public CompareMapping(Map<T,Double> map) {
+	    m_map = map;
+	}
+	public int compare(T o1, T o2) {
+	    Double D1 = m_map.get(o1);
+	    Double D2 = m_map.get(o2);
+	    if (D1.equals(D2)) {
+		// Equal
+		return 0;
+	    } else if (D1.doubleValue() < D2.doubleValue()) {
+		return -1;
+	    } else {
+		return +1;
+	    }
+	}
+	Map<T,Double> m_map;
+    }
+
+    protected ClusterEnergyCalculator m_calib = null;
+    protected double estimateClusterEnergy(Cluster clus) {
+	return m_calib.getEnergy(clus);
+    }
+    /** Specify what energy calibration to use for E/P check. */
+    public void setCalibration(ClusterEnergyCalculator calib) { m_calib = calib; }
+
+    protected double m_separationCut = 30.0; // mm; arbitrary default
+}
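
A worked example of the E/p window applied in checkEoverP(), under the 70%/sqrt(E) hadronic resolution and 3-sigma tolerance hard-coded above:

    double clusterE = 16.0;                    // GeV
    double sigma = 0.7 * Math.sqrt(clusterE);  // 2.8 GeV
    double windowLo = clusterE - 3.0 * sigma;  // 7.6 GeV
    double windowHi = clusterE + 3.0 * sigma;  // 24.4 GeV
    // A 10 GeV track under the pion hypothesis releases ~10.0 GeV,
    // which lies inside [7.6, 24.4], so the pion test alone passes.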

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
SimpleTrackMIPClusterMatcher.java added at 1.1
diff -N SimpleTrackMIPClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SimpleTrackMIPClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,72 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.Track;
+
+/**
+ * Attempt to match a Track to a MIP-like Cluster, based on the intercept point
+ * on the ECAL inner surface and on the direction of the track at the
+ * intercept point.
+ *
+ * @version $Id: SimpleTrackMIPClusterMatcher.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $ 
+ */
+
+public class SimpleTrackMIPClusterMatcher extends SimpleTrackClusterMatcher
+{
+    protected Cluster findMatchedCluster(Track tr, HelixSwimmer swimmer, double alpha, List<Cluster> mips)     
+    {
+	if (m_debug) { System.out.println("DEBUG: SimpleTrackMIPClusterMatched.findMatchedCluster() invoked for a list of "+mips.size()+" clusters."); }
+
+	// Find the track intercept and direction
+	swimmer.setTrack(tr);
+	Hep3Vector trackPoint = swimmer.getPointAtDistance(alpha);
+	// Obtain the unit vector giving the tangent:
+	double delta = 0.1;
+	if (alpha < 0) { delta *= -1.0; }
+	Hep3Vector aLittleFurther = swimmer.getPointAtDistance(alpha+delta);
+	Hep3Vector tangent = VecOp.unit(VecOp.sub(aLittleFurther, trackPoint));
+
+	List<Cluster> nearestMIPs = findNearestClusters(trackPoint, mips);
+	for (Cluster nearbyMIP : nearestMIPs) {
+	    // Obtain geometrical info:
+	    CalorimeterHit nearestHit = findNearestHit(trackPoint, nearbyMIP);
+	    double separation = proximity(trackPoint, nearestHit);
+	    CalorimeterHit firstHitInECAL = findInnermostHitInECAL(nearbyMIP);
+	    double unitDotProduct = findUnitDotProduct(tangent, nearbyMIP);
+	    org.lcsim.geometry.Subdetector subdet = nearestHit.getSubdetector();
+	    // Make cuts:
+	    boolean goodSubDet = (subdet == emb) || (subdet == eme);
+	    boolean goodFirstLayer = (firstHitInECAL!=null && getVLayer(firstHitInECAL) < 5);
+	    boolean goodDotProduct = (Math.abs(unitDotProduct) > 0.85);
+	    double separationCut = m_separationCut;
+	    boolean goodSeparation = (separation < separationCut);
+	    boolean foundMatch = goodSubDet && goodFirstLayer && goodDotProduct && goodSeparation;
+	    if (foundMatch) {
+		// OK, made a good match
+		if (m_debug) { System.out.println("DEBUG: Matched cluster to MIP since subdet="+subdet.getName()+" and firstlayer="+getVLayer(firstHitInECAL)+" and dotProduct="+Math.abs(unitDotProduct)+" and separation="+separation); }
+		return nearbyMIP;
+	    } else {
+		if (m_debug) {
+		    String printme = new String();
+		    printme += "Debug: Didn't match track to MIP cluster since";
+		    printme += " subdet="+subdet.getName()+" ["+goodSubDet+"] and";
+		    if (firstHitInECAL!=null) {
+			printme += " firstlayer="+getVLayer(firstHitInECAL)+" ["+goodFirstLayer+"] and";
+		    } else {
+			printme += " firstlayer=null ["+goodFirstLayer+"] and";
+		    }
+		    printme += " dotProduct="+Math.abs(unitDotProduct)+" ["+goodDotProduct+"] and";
+		    printme += " separation="+separation+" ["+goodSeparation+"]";
+		    System.out.println(printme);
+		}
+	    }
+	}
+	// No match
+	return null;
+    }
+}
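
For scale, the |cos| > 0.85 dot-product cut above accepts tracks whose tangent lies within roughly 32 degrees of the MIP cluster axis (the vectors below are illustrative):

    Hep3Vector t = VecOp.unit(new BasicHep3Vector(1.0, 0.0, 0.0)); // track tangent
    Hep3Vector c = VecOp.unit(new BasicHep3Vector(0.9, 0.3, 0.0)); // MIP axis (sign irrelevant)
    double cosTheta = Math.abs(VecOp.dot(t, c));                   // ~0.949, passes the 0.85 cut
    double maxOpeningAngle = Math.toDegrees(Math.acos(0.85));      // ~31.8 degrees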

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
SmallPhotonMaker.java added at 1.1
diff -N SmallPhotonMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SmallPhotonMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,144 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.Hep3Vector;
+import hep.physics.vec.BasicHepLorentzVector;
+import hep.physics.vec.HepLorentzVector;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.FragmentIdentifier;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.ReconstructedParticle;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.SimCalorimeterHit;
+import org.lcsim.event.base.BaseReconstructedParticle;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Subdetector;
+
+/**
+ * Given a list of clusters, turn those that are consistent with
+ * being fragments AND pass the photon selection into small photons.
+ *
+ * Photon selection: Innermost hit is in layer 0-3 of ECAL
+ *
+ * FIXME: I think this could be implemented as a combination of a
+ * filter and SimpleNeutralParticleMaker without the need for a 
+ * new class.
+ */
+
+public class SmallPhotonMaker extends SimpleNeutralParticleMaker
+{
+    protected FragmentIdentifier m_fragID = null;
+    protected CalorimeterInformation ci;
+    protected Subdetector emb;
+    protected Subdetector eme;
+    public SmallPhotonMaker(FragmentIdentifier fragID) {
+	super(22); // make photons
+	m_fragID = fragID;
+    }
+
+    public void process(EventHeader event) 
+    {
+	m_event = event;
+
+	// Input, output:
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+	List<Cluster> outputClusterList = new Vector<Cluster>();
+	outputClusterList.addAll(inputClusterList); // initially full
+	List<ReconstructedParticle> outputParticleList = new Vector<ReconstructedParticle>();
+
+	for (Cluster clus : inputClusterList) {
+	    if (m_fragID.isFragment(clus, event)) {
+		// Small cluster/fragment -- is it photon-like?
+		CalorimeterHit firstHitInECAL = findInnermostHitInECAL(clus);
+		boolean isPhoton = (firstHitInECAL!=null && getVLayer(firstHitInECAL)<4);
+		if (isPhoton) {
+		    BaseReconstructedParticle part = new BaseReconstructedParticle();
+		    part.addCluster(clus);
+		    double clusterEnergy = estimateClusterEnergy(clus);
+		    Hep3Vector threeMomentum = computeMomentum(clusterEnergy, clus);
+		    // Set the other particle properties that are needed to render
+		    // properly in the event display.
+		    HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
+		    part.set4Vector(fourMomentum);
+		    part.setReferencePoint(0,0,0);
+		    part.setCharge(0);
+		    // Add to the output list
+		    outputParticleList.add(part);
+		    outputClusterList.remove(clus);
+		}
+		if (m_debug) {
+		    String printme = "";
+		    printme += "DEBUG: This fragment with ";
+		    printme += clus.getCalorimeterHits().size();
+		    printme += " hits has first ECAL layer ";
+		    if (firstHitInECAL==null) {
+			printme += "[null]";
+		    } else {
+			printme += getVLayer(firstHitInECAL);
+		    }
+		    printme += " => isPhoton="+isPhoton;
+		    printme += ". True contributions: ";
+		    Map<MCParticle, List<CalorimeterHit>> tmpMap = new HashMap<MCParticle, List<CalorimeterHit>>();
+		    for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+			SimCalorimeterHit simhit = (SimCalorimeterHit) (hit);
+			for (int i=0; i<simhit.getMCParticleCount(); i++) {
+			    MCParticle hitPart = simhit.getMCParticle(i);
+			    if ( ! (tmpMap.keySet().contains(hitPart)) ) {
+				tmpMap.put(hitPart, new Vector<CalorimeterHit>());
+			    }
+			    tmpMap.get(hitPart).add(hit);
+			}
+		    }
+		    for (MCParticle hitPart : tmpMap.keySet()) {
+			printme += " ";
+			printme += hitPart.getType().getName();
+			printme += " (E=";
+			printme += hitPart.getEnergy();
+			printme += ", hits=";
+			printme += tmpMap.get(hitPart).size();
+			printme += ")";
+		    }
+		    System.out.println(printme);
+		}
+	    }
+	}
+
+	event.put(m_outputParticleListName, outputParticleList, ReconstructedParticle.class, 0);
+	event.put(m_outputClusterListName, outputClusterList, Cluster.class, 0);
+    }
+
+    protected String m_outputClusterListName;
+    public void setOutputClusterList(String name) { m_outputClusterListName = name; }
+
+
+    protected CalorimeterHit findInnermostHitInECAL(Cluster clus) {
+        if(ci == null)
+        {
+            ci = CalorimeterInformation.instance();
+            emb = ci.getSubdetector(CalorimeterType.EM_BARREL);
+            eme = ci.getSubdetector(CalorimeterType.EM_ENDCAP);
+        }
+        CalorimeterHit innermostHit = null;
+        for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+            int layer = getVLayer(hit);
+            Subdetector subdet = hit.getSubdetector();           
+            if ( ! subdet.isCalorimeter() ) { throw new AssertionError("Cluster hit outside calorimeter"); }
+            if (subdet == eme || subdet == emb) {
+                // EM -- OK
+                if (innermostHit==null || getVLayer(innermostHit)>layer) {
+                    innermostHit = hit;
+                }
+            }
+        }
+        return innermostHit;
+    }
+    
+    protected int getVLayer(CalorimeterHit hit) {
+        org.lcsim.geometry.IDDecoder id = hit.getIDDecoder();
+        id.setID(hit.getCellID());
+        int layer = id.getVLayer();
+        return layer;
+    }
+}

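A minimal wiring sketch for the class above (illustrative only: the choice of
fragment identifier and its no-argument constructor are assumptions, and the
input cluster list / output particle list setters are inherited from
SimpleNeutralParticleMaker, which is not shown in this diff):

    // Hypothetical wiring inside a parent Driver's constructor.
    FragmentIdentifier fragID = new SimpleFragmentIdentifier(); // assumed no-arg ctor
    SmallPhotonMaker photonMaker = new SmallPhotonMaker(fragID);
    photonMaker.setOutputClusterList("ClustersNotUsedAsSmallPhotons"); // survivors
    add(photonMaker); // photons appear in the inherited output particle list
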
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackClusterMatcher.java added at 1.1
diff -N TrackClusterMatcher.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackClusterMatcher.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,26 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.Track;
+
+/**
+  * Interface for matching Tracks to Clusters
+  *
+  * @version $Id: TrackClusterMatcher.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+  */
+
+public interface TrackClusterMatcher
+{
+    /**
+     * Attempt to match a Track <code>tr</code> to a Cluster
+     * from the list <code>clusters</code>. The return value is
+     * the matched Cluster, or null if no acceptable match is found.
+     *
+     * Implementations typically return a cluster from the supplied list.
+     * They may also return a new cluster whose constituents (via
+     * Cluster.getClusters()) are drawn from this list (and contain no
+     * other hits or clusters).
+     */
+    public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters);
+}

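For orientation, a toy implementation of the contract above (not part of this
commit; it matches by angular alignment with the track momentum, whereas the
matchers added in this commit extrapolate the helix):

    import java.util.List;
    import hep.physics.vec.BasicHep3Vector;
    import hep.physics.vec.Hep3Vector;
    import hep.physics.vec.VecOp;
    import org.lcsim.event.Cluster;
    import org.lcsim.event.Track;

    // Toy matcher: pick the cluster whose centroid is best aligned with the
    // track momentum; return null when nothing passes the (illustrative) cut.
    public class NearestAngleMatcher implements TrackClusterMatcher {
        private final double m_minCosTheta = 0.99; // illustrative threshold
        public Cluster matchTrackToCluster(Track tr, List<Cluster> clusters) {
            Hep3Vector dir = VecOp.unit(new BasicHep3Vector(tr.getMomentum()));
            Cluster best = null;
            double bestCos = m_minCosTheta;
            for (Cluster clus : clusters) {
                Hep3Vector toClus = VecOp.unit(new BasicHep3Vector(clus.getPosition()));
                double cosTheta = VecOp.dot(dir, toClus);
                if (cosTheta > bestCos) { bestCos = cosTheta; best = clus; }
            }
            return best; // null => no acceptable match, per the contract
        }
    }
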
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackHelixExtrapolator.java added at 1.1
diff -N TrackHelixExtrapolator.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackHelixExtrapolator.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,232 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.Track;
+
+public class TrackHelixExtrapolator extends HelixExtrapolator
+{
+    protected Hep3Vector m_intercept = null;
+    protected HelixSwimmer m_swimmer = null;
+    protected double m_alphaIntercept = Double.NaN;
+
+    public TrackHelixExtrapolator() {
+	super();
+    }
+
+    protected TrackHelixExtrapolator(TrackHelixExtrapolator old) {
+	super(old);
+	if (old.m_intercept != null) {
+	    m_intercept = new BasicHep3Vector(old.m_intercept.x(), old.m_intercept.y(), old.m_intercept.z());
+	} else {
+	    m_intercept = null;
+	}
+	m_swimmer = old.m_swimmer; // Safe, since any change to the track will create a new HelixSwimmer.
+	m_alphaIntercept = old.m_alphaIntercept;
+    }
+
+    public void process(EventHeader event) {
+	super.process(event);
+    }
+
+    protected Hep3Vector getInterceptPoint() {
+	return new BasicHep3Vector(m_intercept.x(), m_intercept.y(), m_intercept.z());
+    }
+    public HelixExtrapolationResult performExtrapolation(Track tr) {
+	m_track = tr;
+	// Null track means we blank everything and return failure.
+	if (tr == null) {
+	    m_intercept = null;
+	    m_swimmer = null;
+	    m_alphaIntercept = Double.NaN;
+	    return null;
+	}
+
+	// Make a HelixSwimmer to propagate the track
+	m_swimmer = new HelixSwimmer(m_fieldStrength[2]);
+	m_swimmer.setTrack(tr);
+	
+        // Try swimming to the barrel:
+        double  alphaBarrel = swimToBarrel(m_swimmer);
+        boolean validBarrel = false;
+        // Try swimming to the endcap:
+        double  alphaEndcap = swimToEndcap(m_swimmer);
+        boolean validEndcap = false;
+
+	// Get helix fit output
+	m_alphaIntercept = Double.NaN; 
+        if (isValidBarrelIntercept(m_swimmer, alphaBarrel, m_cutSeparation)) {
+            validBarrel = true;
+        }
+	if (isValidEndcapIntercept(m_swimmer, alphaEndcap, m_cutSeparation)) {
+            validEndcap = true;
+        }
+
+	// Check for special case in corner of barrel/endcap overlap region
+	if ( validEndcap && validBarrel ) {
+	    // Both apparently valid... check again
+	    boolean tightValidEndcap = isValidEndcapIntercept(m_swimmer, alphaEndcap, 0.0);
+	    boolean tightValidBarrel = isValidBarrelIntercept(m_swimmer, alphaBarrel, 0.0);
+	    if (tightValidEndcap && tightValidBarrel) {
+		// This can happen if the track has moderate pT -- it goes into the
+		// barrel, spirals out again, and eventually hits the endcap.
+		// If this is what happened then it should reach the barrel first.
+		if (alphaEndcap < alphaBarrel) {
+		    Hep3Vector interceptEndcap = m_swimmer.getPointAtDistance(alphaEndcap);
+		    Hep3Vector interceptBarrel = m_swimmer.getPointAtDistance(alphaBarrel);
+		    double rEndcap = Math.sqrt(interceptEndcap.x()*interceptEndcap.x() + interceptEndcap.y()*interceptEndcap.y());
+		    double rBarrel = Math.sqrt(interceptBarrel.x()*interceptBarrel.x() + interceptBarrel.y()*interceptBarrel.y());
+		    double zEndcap = interceptEndcap.z();
+		    double zBarrel = interceptBarrel.z();
+		    System.out.println(this.getClass().getName()+": Track extrapolation failure: Track hits endcap THEN barrel -- this doesn't make sense!");
+		    System.out.println("   Endcap intercept at alpha="+alphaEndcap+" has r="+rEndcap+", z="+zEndcap);
+		    System.out.println("   Barrel intercept at alpha="+alphaBarrel+" has r="+rBarrel+", z="+zBarrel);
+		    Hep3Vector p3 = new BasicHep3Vector(tr.getMomentum());
+		    double p = p3.magnitude();
+		    double pt = Math.sqrt(p3.x()*p3.x() + p3.y()*p3.y());
+		    System.out.println("   Track has p="+p+" and pt="+pt);
+		    validEndcap = validBarrel = false;
+		    m_intercept = null;
+		    m_swimmer = null;
+		    m_alphaIntercept = Double.NaN;
+		    return null;
+		} else {
+		    validEndcap = false;
+		}
+	    } else if (!tightValidEndcap && !tightValidBarrel) {
+		throw new AssertionError("Invalid state");
+	    } else {
+		// Only one valid solution -- OK
+		validEndcap = tightValidEndcap;
+		validBarrel = tightValidBarrel;
+	    }
+	}
+
+	if (validEndcap) { m_alphaIntercept = alphaEndcap; }
+	if (validBarrel) { m_alphaIntercept = alphaBarrel; }
+
+	// Did we make a successful extrapolation?
+        if ( Double.isNaN(m_alphaIntercept)) {
+	    // No -- extrapolation failed
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+" failed to extrapolate: alpha is NaN"); }
+	    return null;
+	} else if ( !(validEndcap || validBarrel) ) {
+	    // Invalid state
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+" failed to extrapolate: not a valid barrel or endcap point"); }
+	    return null;
+	} else if ( validEndcap && validBarrel ) {
+	    // Invalid state
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+" failed to extrapolate: valid barrel AND endcap point"); }
+	    throw new AssertionError("DEBUG: "+this.getClass().getName()+" failed to extrapolate: valid barrel AND endcap point");
+	} else {
+	    // Extrapolation succeeded.
+	    m_intercept = m_swimmer.getPointAtDistance(m_alphaIntercept);
+	    m_barrelValid = validBarrel;
+	    m_endcapValid = validEndcap;
+
+	    if (m_debug) { 
+		System.out.print("DEBUG: "+this.getClass().getName()+" extrapolated OK. validEndcap="+validEndcap+" and validBarrel="+validBarrel+" and m_alphaIntercept="+m_alphaIntercept);
+		if (m_intercept == null) {
+		    System.out.print(" -- but intercept point is null!");
+		} else {
+		    double r = Math.sqrt(m_intercept.x()*m_intercept.x() + m_intercept.y()*m_intercept.y());
+		    System.out.print(" -- intercept point at r="+r+", z="+m_intercept.z());
+		}
+		System.out.println();
+	    }
+
+	    // Output
+	    HelixExtrapolationResult output = new HelixExtrapolationResult(new TrackHelixExtrapolator(this));
+	    return output;
+	}
+    }
+    protected Hep3Vector getTangent() {
+	return VecOp.unit(m_swimmer.getMomentumAtLength(m_alphaIntercept));
+    }
+    protected Hep3Vector getTangent(Hep3Vector v) {
+	double alphaPoint = m_swimmer.getDistanceToPoint(v);
+	return VecOp.unit(m_swimmer.getMomentumAtLength(alphaPoint));
+    }
+    protected Hep3Vector extendToEndcapLayer(int layer, Vector<Double> endcap_layering_z, double endcap_rmin, double endcap_rmax, int nsides ) {
+	double layer_z = Math.abs(endcap_layering_z.get(layer));
+        double distanceToEndcap = m_swimmer.getDistanceToZ( layer_z);
+
+        Hep3Vector v = m_swimmer.getPointAtDistance(distanceToEndcap);
+        double found_endcap_polar_r = Math.sqrt(v.x()*v.x() + v.y()*v.y());
+        boolean validSolution = (found_endcap_polar_r >= endcap_rmin-m_cutSeparation && found_endcap_polar_r <= endcap_rmax+m_cutSeparation);
+        if(validSolution){
+	    return v;
+	} else {
+	    return null;
+	}
+    }
+    protected Hep3Vector extendToBarrelLayer(int layer, Vector<Double> barrel_layering_r, double barrel_zmin, double barrel_zmax, int nsides ) {
+	double layer_r =  barrel_layering_r.get(layer);
+        double distance;
+	if(nsides < 3)distance = m_swimmer.getDistanceToRadius(layer_r);
+        else distance = m_swimmer.getDistanceToPolyhedra(layer_r,nsides);
+        
+        Hep3Vector v = m_swimmer.getPointAtDistance(distance);
+        double found_barrel_z = v.z();
+        boolean validSolution =(found_barrel_z >= barrel_zmin-m_cutSeparation && found_barrel_z <= barrel_zmax+m_cutSeparation);
+	if (!Double.isNaN(distance) && validSolution) {
+	    // Extrapolated OK
+	    return v;
+	} else {
+	    // Extrapolation failed
+	    return null;
+	}
+    }
+
+    // Internal stuff
+    protected double swimToBarrel(HelixSwimmer swimmer) {
+        // Swim to the front face of the ECAL barrel
+        if(m_ECAL_barrel_nsides < 3)return swimmer.getDistanceToRadius(m_ECAL_barrel_r);
+        return swimmer.getDistanceToPolyhedra(m_ECAL_barrel_r,m_ECAL_barrel_nsides);
+    }
+    protected double swimToEndcap(HelixSwimmer swimmer) {
+        // Swim to the front face of the ECAL endcap (either side)
+        double distanceToEndcap1 = swimmer.getDistanceToZ(m_ECAL_endcap_z);
+        double distanceToEndcap2 = swimmer.getDistanceToZ(-m_ECAL_endcap_z);
+        if (distanceToEndcap1>0) {
+            return distanceToEndcap1;
+        } else if (distanceToEndcap2>0) {
+            return distanceToEndcap2;
+        } else {
+	    return Double.NaN;
+	}
+    }
+    protected boolean isValidBarrelIntercept(HelixSwimmer swimmer, double alpha, double uncertainty) {
+        // Must have m_ECAL_barrel_zmin <= z <= m_ECAL_barrel_zmax (within errors)
+        // AND -m_ECAL_endcap_z <= z <= m_ECAL_endcap_z, i.e. the intercept may
+        // not lie beyond the endcap face.
+        Hep3Vector intercept = swimmer.getPointAtDistance(alpha);
+        double z = intercept.z();
+        boolean zInRangeBarrel = (z >= m_ECAL_barrel_zmin-uncertainty && z <= m_ECAL_barrel_zmax+uncertainty);
+	boolean vetoEndcap = (z >= -m_ECAL_endcap_z && z <= m_ECAL_endcap_z);
+        return zInRangeBarrel && vetoEndcap;
+    }
+    protected boolean isValidEndcapIntercept(HelixSwimmer swimmer, double alpha, double uncertainty) {
+        // Must have m_ECAL_endcap_rmin <= r <= m_ECAL_endcap_rmax (within errors),
+        // measured perpendicular to the nearest face when the endcap is polygonal.
+        Hep3Vector intercept = swimmer.getPointAtDistance(alpha);
+        double r = Math.sqrt(intercept.x()*intercept.x() + intercept.y()*intercept.y());
+        if(m_ECAL_endcap_nsides > 2)
+        {
+            double phi = Math.atan2(intercept.y(),intercept.x());
+            double phip = phi;
+            while(phip < -Math.PI/m_ECAL_endcap_nsides){phip += 2.*Math.PI/m_ECAL_endcap_nsides;}
+            while(phip >= Math.PI/m_ECAL_endcap_nsides){phip -= 2.*Math.PI/m_ECAL_endcap_nsides;}
+            double x = r*Math.cos(phip);
+            boolean rInRange = (x >= m_ECAL_endcap_rmin-uncertainty && x <= m_ECAL_endcap_rmax+uncertainty);
+            return rInRange;
+        }
+        else
+        {
+            boolean rInRange = (r >= m_ECAL_endcap_rmin-uncertainty && r <= m_ECAL_endcap_rmax+uncertainty);
+            return rInRange;
+        }
+    }
+}

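From a caller's point of view, the extrapolator above is used roughly as
follows (a sketch; it assumes process(event) has already run so that the
field strength and geometry cuts are initialized, and uses only methods
exercised elsewhere in this commit):

    HelixExtrapolator extrap = new TrackHelixExtrapolator();
    // ... after extrap.process(event) has cached the event geometry/field:
    HelixExtrapolationResult result = extrap.performExtrapolation(tr);
    if (result != null) {
        Hep3Vector entry = result.getInterceptPoint();      // ECAL entry point
        Long cell = result.extendToECALLayerAndFindCell(0); // cell ID in layer 0, may be null
    }
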
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackHelixPlusHitExtrapolator.java added at 1.1
diff -N TrackHelixPlusHitExtrapolator.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackHelixPlusHitExtrapolator.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,348 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+
+import org.lcsim.event.TrackerHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.Track;
+
+public class TrackHelixPlusHitExtrapolator extends TrackHelixExtrapolator 
+{
+    public TrackHelixPlusHitExtrapolator() {
+	super();
+    }
+
+    protected TrackHelixPlusHitExtrapolator(TrackHelixPlusHitExtrapolator old) {
+	super(old);
+    }
+
+    public void process(EventHeader event) {
+	super.process(event);
+    }
+
+    public HelixExtrapolationResult performExtrapolation(Track tr) {
+	// Null track means we blank everything and return failure.
+	m_track = tr;
+	if (tr == null) {
+	    m_intercept = null;
+	    m_swimmer = null;
+	    m_alphaIntercept = Double.NaN;
+	    return null;
+	}
+
+	// Start by swimming as per parent:
+	super.performExtrapolation(tr);
+	if (m_swimmer == null) {
+	    // Failed to extrapolate completely -- didn't even fit a helix
+	    m_intercept = null;
+	    m_swimmer = null;
+	    m_alphaIntercept = Double.NaN;
+	    return null;
+	}
+	
+	// Now scan over tracker hits and find the outermost one.
+	// Note that the details here depend on what kind of hit
+	// we're looking at.
+	List<TrackerHit> trackHits = tr.getTrackerHits();
+	if (trackHits.size() == 0) { throw new AssertionError("Track found with no track hits!"); }
+	TrackerHit outermostHit = findOutermostHit(trackHits);
+
+	// Find the POCA to the hit with the old helix:
+	double alpha = Double.NaN;
+	Hep3Vector pointOfClosestApproachToTrackHit = null;
+	Hep3Vector offset = null;
+
+	if (outermostHit instanceof  org.lcsim.event.base.BaseTrackerHitMC) {
+	    // This cheated hit has exact 3D info
+	    Hep3Vector positionOfOutermostHit = new BasicHep3Vector(outermostHit.getPosition());
+	    alpha = m_swimmer.getTrackLengthToPoint(positionOfOutermostHit);
+	    pointOfClosestApproachToTrackHit = m_swimmer.getPointAtLength(alpha);
+	    // Correct with a 3D step from POCA-to-hit to the hit itself.
+	    offset = VecOp.sub(positionOfOutermostHit, pointOfClosestApproachToTrackHit);
+	} else if (outermostHit instanceof org.lcsim.fit.helicaltrack.HelicalTrack2DHit) {
+	    // This is a barrel hit with 2D hit info (and a weak constraint on z)
+	    // Correct with an offset in r-phi but not in z
+	    org.lcsim.fit.helicaltrack.HelicalTrack2DHit hit = (org.lcsim.fit.helicaltrack.HelicalTrack2DHit)(outermostHit);
+	    double r = hit.r();
+	    alpha = m_swimmer.getDistanceToRadius(r);
+	    pointOfClosestApproachToTrackHit = m_swimmer.getPointAtLength(alpha);
+	    // Correct in xy but not in z since no z information
+	    offset = new BasicHep3Vector(hit.x() - pointOfClosestApproachToTrackHit.x(), hit.y() - pointOfClosestApproachToTrackHit.y(), 0.0);
+	} else if (outermostHit instanceof org.lcsim.fit.helicaltrack.HelicalTrackCross) {
+	    // This is an endcap hit built from axial+stereo strips.
+	    // The appropriate position uncertainty is unclear; treat z as exact.
+	    org.lcsim.fit.helicaltrack.HelicalTrackCross hit = (org.lcsim.fit.helicaltrack.HelicalTrackCross)(outermostHit);
+	    double z = hit.z();
+	    alpha = m_swimmer.getDistanceToZ(z);
+	    pointOfClosestApproachToTrackHit = m_swimmer.getPointAtLength(alpha);
+	    // Step from extrapolation point (x, y, z) to hit position (x', y', z) -- note that both have same z.
+	    offset = new BasicHep3Vector(hit.x() - pointOfClosestApproachToTrackHit.x(), hit.y() - pointOfClosestApproachToTrackHit.y(), 0.0);
+	} else if (outermostHit instanceof org.lcsim.fit.helicaltrack.HelicalTrack3DHit) {
+	    // This is a vertex detector hit with 3D hit info
+	    org.lcsim.fit.helicaltrack.HelicalTrack3DHit hit = (org.lcsim.fit.helicaltrack.HelicalTrack3DHit)(outermostHit);
+	    Hep3Vector positionOfOutermostHit = new BasicHep3Vector(hit.x(), hit.y(), hit.z());
+	    alpha = m_swimmer.getTrackLengthToPoint(positionOfOutermostHit);
+	    pointOfClosestApproachToTrackHit = m_swimmer.getPointAtLength(alpha);
+	    // Correct with a 3D step from POCA-to-hit to the hit itself.
+	    offset = VecOp.sub(positionOfOutermostHit, pointOfClosestApproachToTrackHit);
+	} else {
+	    // Unknown!
+	    Hep3Vector positionOfOutermostHit = new BasicHep3Vector(outermostHit.getPosition());
+	    double r = Math.sqrt(positionOfOutermostHit.x()*positionOfOutermostHit.x() + positionOfOutermostHit.y()*positionOfOutermostHit.y());
+	    double z = positionOfOutermostHit.z();
+	    throw new AssertionError("ERROR: Unknown hit of type "+outermostHit.getClass().getName()+" at r="+r+", z="+z);
+	}
+
+	Hep3Vector momentumAtPOCA = m_swimmer.getMomentumAtLength(alpha);
+	Hep3Vector newPoint = VecOp.add(pointOfClosestApproachToTrackHit, offset);
+
+	// Make a new helix swimmer:
+	HelixSwimmer newHelix = new HelixSwimmer(m_fieldStrength[2]);
+	newHelix.setTrack(momentumAtPOCA, newPoint, tr.getCharge());
+	// Over-write old helix swimmer:
+	m_swimmer = newHelix;
+	
+	// Try swimming to the barrel:
+	double  alphaBarrel = swimToBarrel(m_swimmer);
+	boolean validBarrel = false;
+	// Try swimming to the endcap:
+	double  alphaEndcap = swimToEndcap(m_swimmer);
+	boolean validEndcap = false;
+	
+	// Get helix fit output
+	m_alphaIntercept = Double.NaN; 
+	m_intercept = null;
+	if (isValidBarrelIntercept(m_swimmer, alphaBarrel, m_cutSeparation)) {
+	    validBarrel = true;
+	}
+	if (isValidEndcapIntercept(m_swimmer, alphaEndcap, m_cutSeparation)) {
+	    validEndcap = true;
+	}
+
+	// Check for special case in corner of barrel/endcap overlap region
+	if ( validEndcap && validBarrel ) {
+	    // Both apparently valid... check again
+	    boolean tightValidEndcap = isValidEndcapIntercept(m_swimmer, alphaEndcap, 0.0);
+	    boolean tightValidBarrel = isValidBarrelIntercept(m_swimmer, alphaBarrel, 0.0);
+	    if (tightValidEndcap && tightValidBarrel) {
+		// This can happen if the track has moderate pT -- it goes into the
+		// barrel, spirals out again, and eventually hits the endcap.
+		// If this is what happened then it should reach the barrel first.
+		if (alphaEndcap < alphaBarrel) {
+		    Hep3Vector interceptEndcap = m_swimmer.getPointAtDistance(alphaEndcap);
+		    Hep3Vector interceptBarrel = m_swimmer.getPointAtDistance(alphaBarrel);
+		    double rEndcap = Math.sqrt(interceptEndcap.x()*interceptEndcap.x() + interceptEndcap.y()*interceptEndcap.y());
+		    double rBarrel = Math.sqrt(interceptBarrel.x()*interceptBarrel.x() + interceptBarrel.y()*interceptBarrel.y());
+		    double zEndcap = interceptEndcap.z();
+		    double zBarrel = interceptBarrel.z();
+		    throw new AssertionError("Track hits endcap THEN barrel -- this doesn't make sense!\nEndcap intercept at alpha="+alphaEndcap+" has r="+rEndcap+", z="+zEndcap+"\nBarrel intercept at alpha="+alphaBarrel+" has r="+rBarrel+", z="+zBarrel);
+		} else {
+		    validEndcap = false;
+		}
+	    } else if (!tightValidEndcap && !tightValidBarrel) {
+		throw new AssertionError("Invalid state");
+	    } else {
+		// Only one valid solution -- OK
+		validEndcap = tightValidEndcap;
+		validBarrel = tightValidBarrel;
+	    }
+	}
+	
+	if (validEndcap) { m_alphaIntercept = alphaEndcap; }
+	if (validBarrel) { m_alphaIntercept = alphaBarrel; }
+
+	// Did we make a successful extrapolation?
+	if ( Double.isNaN(m_alphaIntercept)) {
+	    // No -- extrapolation failed
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+" failed to extrapolate: alpha is NaN"); }
+	    return null;
+	} else if ( !(validEndcap || validBarrel) ) {
+	    // Invalid state
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+" failed to extrapolate: not a valid barrel or endcap point"); }
+	    return null;
+	} else if ( validEndcap && validBarrel ) {
+	    // Invalid state
+	    if (m_debug) { System.out.println("DEBUG: "+this.getClass().getName()+" failed to extrapolate: valid barrel AND endcap point"); }
+	    throw new AssertionError("DEBUG: "+this.getClass().getName()+" failed to extrapolate: valid barrel AND endcap point");
+	    //return null;
+	} else {
+	    // Extrapolation succeeded.
+	    m_intercept = m_swimmer.getPointAtDistance(m_alphaIntercept);
+	    m_barrelValid = validBarrel;
+	    m_endcapValid = validEndcap;
+	    if (m_debug) { 
+		System.out.print("DEBUG: "+this.getClass().getName()+" extrapolated OK. validEndcap="+validEndcap+" and validBarrel="+validBarrel+" and m_alphaIntercept="+m_alphaIntercept); 
+		if (m_intercept == null) {
+		    System.out.print(" -- but intercept point is null!");
+		} else {
+		    double r = Math.sqrt(m_intercept.x()*m_intercept.x() + m_intercept.y()*m_intercept.y());
+		    System.out.print(" -- intercept point at r="+r+", z="+m_intercept.z());
+		}
+		System.out.println();
+	    }
+	    // Output
+	    HelixExtrapolationResult output = new HelixExtrapolationResult(new TrackHelixPlusHitExtrapolator(this));
+	    return output;
+	}
+    }
+
+    TrackerHit findOutermostHit(List<TrackerHit> trackerHits) {
+	boolean cheatHitFound = false;
+	boolean helicalHitFound = false;
+	for (TrackerHit hit : trackerHits) {
+	    boolean thisHitIsCheatHit = (hit instanceof org.lcsim.event.base.BaseTrackerHitMC);
+	    boolean thisHitIsHelicalHit = (hit instanceof org.lcsim.fit.helicaltrack.HelicalTrackCross || hit instanceof org.lcsim.fit.helicaltrack.HelicalTrack2DHit || hit instanceof org.lcsim.fit.helicaltrack.HelicalTrack3DHit);
+	    if (thisHitIsCheatHit && thisHitIsHelicalHit) {
+		throw new AssertionError("Ambiguous hit of class "+hit.getClass().getName());
+	    } else if (!thisHitIsCheatHit && !thisHitIsHelicalHit) {
+		throw new AssertionError("Unidentified hit of unknown class "+hit.getClass().getName());
+	    }
+	    if (thisHitIsCheatHit) { 
+		cheatHitFound = true;
+	    }
+	    if (thisHitIsHelicalHit) {
+		helicalHitFound = true;
+	    }
+	}
+	if (cheatHitFound && helicalHitFound) {
+	    throw new AssertionError("Mixed list of hits!");
+	}
+	if (cheatHitFound) {
+	    return findOutermostCheatHit(trackerHits);
+	} else if (helicalHitFound) {
+	    return findOutermostHelicalHit(trackerHits);
+	} else {
+	    throw new AssertionError("Unclassified list of hits!");
+	}
+    }
+
+    TrackerHit findOutermostCheatHit(List<TrackerHit> trackerHits) {
+	// Find the hit with the largest |z|
+	TrackerHit outermostHit = null;
+	Hep3Vector positionOfOutermostHit = null;
+	for (TrackerHit trackHit : trackerHits) {
+	    Hep3Vector pos = new BasicHep3Vector(trackHit.getPosition());
+	    if (outermostHit==null) {
+		outermostHit = trackHit;
+		positionOfOutermostHit = pos;
+	    } else {
+		if (Math.abs(pos.z()) > Math.abs(positionOfOutermostHit.z())) {
+		    outermostHit = trackHit;
+		    positionOfOutermostHit = pos;
+		}
+	    }
+	}
+	return outermostHit;
+    }
+    
+    TrackerHit findOutermostHelicalHit(List<TrackerHit> trackerHits) {
+	// This is a little tricky. The rules are:
+	//  1) Any outer tracker layer > any vertex detector layer
+	//  2) For two vertex detector hits, the one with the larger |z| wins.
+	//  3) For any two outer tracker barrel layers, the one with the larger r wins.
+	//  4) For any two outer tracker endcap layers, the one with the larger |z| wins.
+	//  5) For a tracker barrel & endcap layer...
+	//       5a) If barrel |zmin| > endcap |z|, the barrel wins
+	//       5b) If barrel |zmax| < endcap |z|, the endcap wins
+	//       5c) If there is ambiguity, the barrel wins (a little arbitrary)
+	TrackerHit outermostHit = null;
+	for (TrackerHit hit : trackerHits) {
+	    if (outermostHit == null) {
+		outermostHit = hit;
+	    } else {
+		boolean outermostHitIsVertexHit = (outermostHit instanceof org.lcsim.fit.helicaltrack.HelicalTrack3DHit);
+		boolean outermostHitIsBarrelHit = (outermostHit instanceof org.lcsim.fit.helicaltrack.HelicalTrack2DHit);
+		boolean outermostHitIsEndcapHit = (outermostHit instanceof org.lcsim.fit.helicaltrack.HelicalTrackCross);
+		if (!(outermostHitIsVertexHit || outermostHitIsBarrelHit || outermostHitIsEndcapHit)) { throw new AssertionError("Unidentified hit"); }
+		if (outermostHitIsVertexHit && outermostHitIsBarrelHit) { throw new AssertionError("Ambiguous hit"); }
+		if (outermostHitIsVertexHit && outermostHitIsEndcapHit) { throw new AssertionError("Ambiguous hit"); }
+		if (outermostHitIsBarrelHit && outermostHitIsEndcapHit) { throw new AssertionError("Ambiguous hit"); }
+		boolean currentHitIsVertexHit = (hit instanceof org.lcsim.fit.helicaltrack.HelicalTrack3DHit);
+		boolean currentHitIsBarrelHit = (hit instanceof org.lcsim.fit.helicaltrack.HelicalTrack2DHit);
+		boolean currentHitIsEndcapHit = (hit instanceof org.lcsim.fit.helicaltrack.HelicalTrackCross);
+		if (!(currentHitIsVertexHit || currentHitIsBarrelHit || currentHitIsEndcapHit)) { throw new AssertionError("Unidentified hit"); }
+		if (currentHitIsVertexHit && currentHitIsBarrelHit) { throw new AssertionError("Ambiguous hit"); }
+		if (currentHitIsVertexHit && currentHitIsEndcapHit) { throw new AssertionError("Ambiguous hit"); }
+		if (currentHitIsBarrelHit && currentHitIsEndcapHit) { throw new AssertionError("Ambiguous hit"); }
+
+		if (currentHitIsVertexHit) {
+		    if (outermostHitIsVertexHit) {
+			//  2) For two vertex detector hits, the one with the larger |z| wins.
+			double currentHit_z = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrack3DHit)(hit)).z());
+			double outermostHit_z = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrack3DHit)(outermostHit)).z());
+			if (currentHit_z > outermostHit_z) {
+			    outermostHit = hit;
+			}
+			continue;
+		    } else {
+			//  1) Any outer tracker layer > any vertex detector layer
+			continue;
+		    }
+		} else {
+		    if (outermostHitIsVertexHit) {
+			//  1) Any outer tracker layer > any vertex detector layer
+			outermostHit = hit;
+			continue;
+		    } else {
+			if (outermostHitIsBarrelHit && currentHitIsBarrelHit) {
+			    //  3) For any two outer tracker barrel layers, the one with the larger r wins.
+			    double currentHit_r = ((org.lcsim.fit.helicaltrack.HelicalTrack2DHit)(hit)).r();
+			    double outermostHit_r = ((org.lcsim.fit.helicaltrack.HelicalTrack2DHit)(outermostHit)).r();
+			    if (currentHit_r > outermostHit_r) {
+				outermostHit = hit;
+			    }
+			    continue;
+			} else if (outermostHitIsEndcapHit && currentHitIsEndcapHit) {
+			    //  4) For any two outer tracker endcap layers, the one with the larger |z| wins.
+			    double currentHit_z = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrackCross)(hit)).z());
+			    double outermostHit_z = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrackCross)(outermostHit)).z());
+			    if (currentHit_z > outermostHit_z) {
+				outermostHit = hit;
+			    }
+			    continue;
+			} else if (outermostHitIsBarrelHit && currentHitIsEndcapHit) {
+			    // One barrel and one endcap layer
+			    double barrel_zmin = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrack2DHit)(outermostHit)).zmin());
+			    double barrel_zmax = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrack2DHit)(outermostHit)).zmax());
+			    double endcap_z = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrackCross)(hit)).z());
+			    if (barrel_zmin > endcap_z) {
+				// 5a) If barrel |zmin| > endcap |z|, the barrel wins
+				continue;
+			    } else if (barrel_zmax < endcap_z) {
+				// 5b) If barrel |zmax| < endcap |z|, the endcap wins
+				outermostHit = hit;
+				continue;
+			    } else {
+				// 5c) If there is ambiguity, the barrel wins (a little arbitrary)
+				continue;
+			    }
+			} else if (outermostHitIsEndcapHit&&currentHitIsBarrelHit) {
+			    // One barrel and one endcap layer
+			    double barrel_zmin = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrack2DHit)(hit)).zmin());
+			    double barrel_zmax = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrack2DHit)(hit)).zmax());
+			    double endcap_z = Math.abs(((org.lcsim.fit.helicaltrack.HelicalTrackCross)(outermostHit)).z());
+			    if (barrel_zmin > endcap_z) {
+				// 5a) If barrel |zmin| > endcap |z|, the barrel wins
+				outermostHit = hit;
+				continue;
+			    } else if (barrel_zmax < endcap_z) {
+				// 5b) If barrel |zmax| < endcap |z|, the endcap wins
+				continue;
+			    } else {
+				// 5c) If there is ambiguity, the barrel wins (a little arbitrary)
+				outermostHit = hit;
+				continue;
+			    }
+			}
+		    }
+		}
+		// Shouldn't reach here
+		throw new AssertionError("Failed to classify! outermostHit is "+outermostHit.getClass().getName()+" and hit is "+hit.getClass().getName());
+	    }
+	}
+
+	return outermostHit;
+    }
+}

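The core of the "plus hit" refinement above, in isolation (a sketch: the local
names swimmer, hitPos, track and bFieldZ are hypothetical, and this shows only
the full-3D offset used for cheat and vertex hits; barrel 2D hits and stereo
crosses restrict the offset to x-y as in the code):

    // Step the parent-fit swimmer to the outermost hit, shift the reference
    // point onto the hit, and re-seed a fresh swimmer from there.
    double alpha = swimmer.getTrackLengthToPoint(hitPos);
    Hep3Vector poca = swimmer.getPointAtLength(alpha);
    Hep3Vector offset = VecOp.sub(hitPos, poca); // full 3D step to the hit
    HelixSwimmer refit = new HelixSwimmer(bFieldZ);
    refit.setTrack(swimmer.getMomentumAtLength(alpha), VecOp.add(poca, offset), track.getCharge());
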
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackToClusterMapMaker.java added at 1.1
diff -N TrackToClusterMapMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackToClusterMapMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,53 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*; 
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+
+/**
+ * Abstract class with several implementations, used for
+ * matching tracks to clusters. The rules are:
+ *   1) The input tracks are supplied as a named List from the event.
+ *   2) Matched tracks are written out to the event as a Map<Track,Cluster>
+ *   3) Unmatched tracks are written out to the event as a List<Track>
+ *   4) The output tracks MAY be altered copies of the input tracks.
+ *      For example, if two tracks enter the calorimeter in the same
+ *      cell, they may get bundled into a single track. So the user
+ *      should not re-use the input track list.
+ *   5) There should be no overlap between the matched tracks and the
+ *      unmatched tracks.
+ *   6) Implementing classes MAY write out additional output.
+ *
+ * @version $Id: TrackToClusterMapMaker.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ * @author [log in to unmask]
+ */
+
+public abstract class TrackToClusterMapMaker extends Driver {
+
+    abstract protected Map<Track,Cluster> makeMap(EventHeader event);
+
+    protected String m_inputTrackListName;
+    protected String m_outputMapName;
+    protected String m_outputUnmatchedTrackListName;
+
+    /**
+     * General-purpose constructor. Implementing classes may make their
+     * own constructor with additional arguments.
+     *
+     * @param inputTrackList The name of the input List of Tracks to read in from the event and match to clusters.
+     * @param outputMap The name to write out the output Map<Track,Cluster> of matched tracks as.
+     * @param outputUnmatchedTrackList The name to write unmatched/unused tracks out as.
+     */
+    public TrackToClusterMapMaker(String inputTrackList, String outputMap, String outputUnmatchedTrackList) {
+	super();
+	m_inputTrackListName = inputTrackList;
+	m_outputMapName = outputMap;
+	m_outputUnmatchedTrackListName = outputUnmatchedTrackList;
+    }
+
+    public void process(EventHeader event) {
+	Map<Track,Cluster> output = makeMap(event);
+	event.put(m_outputMapName, output);
+    }
+}

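A minimal concrete subclass, to make the contract explicit: process() above
writes only the map, so implementations must put the unmatched-track list into
the event themselves (a do-nothing sketch, not part of this commit):

    public class NullTrackToClusterMapMaker extends TrackToClusterMapMaker {
        public NullTrackToClusterMapMaker(String in, String outMap, String outUnmatched) {
            super(in, outMap, outUnmatched);
        }
        protected Map<Track,Cluster> makeMap(EventHeader event) {
            List<Track> tracks = event.get(Track.class, m_inputTrackListName);
            event.put(m_outputUnmatchedTrackListName, new Vector<Track>(tracks)); // nothing matched
            return new HashMap<Track,Cluster>(); // empty map; process() writes it out
        }
    }
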
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackToElectronMapMaker.java added at 1.1
diff -N TrackToElectronMapMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackToElectronMapMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,165 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.event.*;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.util.swim.Line;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+
+/** 
+ * Identify electrons by matching tracks to photon clusters and applying
+ * energy-residual, shower-core, and track-impact cuts.
+ *
+ * @version $Id: TrackToElectronMapMaker.java,v 1.1 2012/04/11 15:49:35 zaidan Exp $
+ * @author [log in to unmask]
+ */
+
+public class TrackToElectronMapMaker extends TrackToClusterMapMaker {
+
+    protected String m_outputElectronClusterListName;
+    protected String m_inputPhotonListName;
+    protected ClusterEnergyCalculator m_photonCalib;
+    protected HelixExtrapolator m_findCluster;
+    protected CalorimeterInformation ci;
+
+    public TrackToElectronMapMaker(HelixExtrapolator findCluster, String inputPhotonList, String inputTrackList, String outputTrackClusterMap, String outputUnmatchedTrackList, String outputElectronClusterList) {
+	super(inputTrackList, outputTrackClusterMap, outputUnmatchedTrackList);
+	m_outputElectronClusterListName = outputElectronClusterList;
+	m_inputPhotonListName = inputPhotonList;
+	m_findCluster = findCluster;
+	m_photonCalib = new QPhotonClusterEnergyCalculator();
+    }
+
+
+
+    protected Map<Track,Cluster> makeMap(EventHeader event) {
+	// Read in inputs
+        if(ci == null)ci = CalorimeterInformation.instance();
+	List<Track> trackList = event.get(Track.class, m_inputTrackListName);
+	List<Cluster> photons = event.get(Cluster.class, m_inputPhotonListName);
+	
+	// The output
+	Map<Track,Cluster> outputMap = new HashMap<Track,Cluster>();
+
+	// Extrapolation utility
+	LocalHelixExtrapolationTrackClusterMatcher genMatch = new LocalHelixExtrapolationTrackClusterMatcher(m_findCluster);
+
+	// Loop over tracks, looking for a good match to a photon
+	Map<Track,Cluster> electronCandidateMap = new HashMap<Track,Cluster>();
+	Set<Cluster> electronCandidateClusters = new HashSet<Cluster>();
+	Set<Cluster> vetoedElectronCandidateClusters = new HashSet<Cluster>();
+	List<Cluster> electronClusters = new Vector<Cluster>();
+	List<Track> electronTracks = new Vector<Track>();
+	for (Track tr : trackList) {
+	    Cluster matchedCluster = genMatch.matchTrackToCluster(tr, photons);
+	    if (matchedCluster != null) {
+		if (photons.contains(matchedCluster)) {
+		    // Electron candidate
+		    if (electronCandidateClusters.contains(matchedCluster)) {
+			// Multiple track matches => veto
+			vetoedElectronCandidateClusters.add(matchedCluster);
+		    }
+		    electronCandidateClusters.add(matchedCluster);
+		    // Now, are we confident that it's an electron?
+		    double electronResid = electronEnergyNormalizedResidual(tr, matchedCluster);
+		    int hitsInCore = countHitsInCoreInFirstLayers(tr, matchedCluster, 5);
+		    double trackIP = distanceFromTrackToPhotonCore(tr, matchedCluster);
+		    if (electronResid > -2.0 && electronResid < 2.0 && trackIP < 7.0 && hitsInCore > 1) {
+			// Accept as electron
+			electronCandidateMap.put(tr, matchedCluster);
+		    }
+		} else {
+		    throw new AssertionError("Internal consistency failure");
+		}
+	    }
+	}
+	
+	// Now, did we accept any of those?
+	for (Track tr : electronCandidateMap.keySet()) {
+	    Cluster clus = electronCandidateMap.get(tr);
+	    if (!electronCandidateClusters.contains(clus)) { throw new AssertionError("Book-keeping failure"); }
+	    if (!vetoedElectronCandidateClusters.contains(clus)) {
+		// We accepted it and didn't veto it => electron
+		if (electronClusters.contains(clus)) { throw new AssertionError("Book-keeping failure"); }
+		electronClusters.add(clus);
+		electronTracks.add(tr);
+		outputMap.put(tr,clus);
+	    }
+	}
+
+	// Unmatched tracks
+	List<Track> unmatchedTracks = new Vector<Track>();
+	unmatchedTracks.addAll(trackList);
+	unmatchedTracks.removeAll(electronTracks);
+
+	// Outputs
+	event.put(m_outputUnmatchedTrackListName, unmatchedTracks);
+	event.put(m_outputElectronClusterListName, electronClusters);
+	return outputMap;
+    }
+
+    // Utility routines
+
+    private double electronEnergyNormalizedResidual(Track tr, Cluster clus) {
+	double energyAssumingElectron = m_photonCalib.getEnergy(clus);
+	double trackMomentum = (new BasicHep3Vector(tr.getMomentum())).magnitude();
+	double residual = trackMomentum - energyAssumingElectron;
+	double estimatedError = 0.2 * Math.sqrt(trackMomentum);
+	if (trackMomentum < 1.0) { 
+	    // Don't shrink the error too much.
+	    estimatedError = 0.2; 
+	}
+	return (residual/estimatedError);
+    }
+
+
+    private int countHitsInCoreInFirstLayers(Track tr, Cluster clus, int nLayers) {
+        // Until we come up with a way to do this with a polyhedral
+        // detector, bypass the check
+        if(ci.getNSides(CalorimeterType.EM_BARREL) > 2)return nLayers;
+	Set<Long> coreClusterHits = new HashSet<Long>();
+	for (CalorimeterHit hit : clus.getClusters().get(0).getCalorimeterHits()) {
+	    coreClusterHits.add(hit.getCellID());
+	}
+	int countMatches = 0;
+	HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
+	if (result != null) {
+	    for (int iLayer=0; iLayer<nLayers; iLayer++) {
+		Long cellID = result.extendToECALLayerAndFindCell(iLayer);
+		if (cellID != null && coreClusterHits.contains(cellID)) {
+		    countMatches++;
+		}
+	    }
+	}
+	return countMatches;
+    }
+
+
+    private double distanceFromTrackToPhotonCore(Track tr, Cluster clus) {
+	HelixExtrapolationResult result = m_findCluster.performExtrapolation(tr);
+	Hep3Vector interceptPoint = null;
+	if (result != null) {
+	    interceptPoint = result.getInterceptPoint();
+	}
+	if (interceptPoint != null) {
+	    Cluster coreSubCluster = clus.getClusters().get(0);
+	    BasicCluster copyOfCoreSubCluster = new BasicCluster();
+	    copyOfCoreSubCluster.addCluster(coreSubCluster);
+	    TensorClusterPropertyCalculator calc = new TensorClusterPropertyCalculator();
+	    copyOfCoreSubCluster.setPropertyCalculator(calc);
+	    copyOfCoreSubCluster.calculateProperties();
+	    double[][]axes = calc.getPrincipleAxis();
+	    Hep3Vector coreDirection = new BasicHep3Vector(axes[0][0], axes[0][1], axes[0][2]);
+	    Hep3Vector corePosition = new BasicHep3Vector(calc.getPosition());
+	    Line line = new Line(corePosition, coreDirection);
+	    double s = line.getDistanceToPoint(interceptPoint);
+	    Hep3Vector poca = line.getPointAtDistance(s);
+	    double doca = VecOp.sub(poca, interceptPoint).magnitude();
+	    return doca;
+	} else {
+	    return Double.NaN;
+	}
+    }
+
+}

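For scale, a worked pass through the cuts in makeMap() above: a 25 GeV/c track
matched to a cluster with photon-calibrated energy 23.5 GeV gives
residual = 25 - 23.5 = 1.5 GeV against an estimated error of
0.2*sqrt(25) = 1.0 GeV, i.e. a normalized residual of +1.5, inside the
(-2, +2) window. The candidate is then accepted as an electron only if the
extrapolated track also hits more than one core-cluster cell in the first 5
ECAL layers and passes within 7.0 units (presumably mm, lcsim's default
length unit) of the shower-core axis.
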
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackToGenericClusterMapMaker.java added at 1.1
diff -N TrackToGenericClusterMapMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackToGenericClusterMapMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,112 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*; 
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
+
+public class TrackToGenericClusterMapMaker extends TrackToClusterMapMaker {
+
+    protected HelixExtrapolator m_findCluster;
+    public TrackToGenericClusterMapMaker(HelixExtrapolator findCluster, String inputTrackList, String outputMap, String outputUnmatchedTrackList) {
+	super(inputTrackList, outputMap, outputUnmatchedTrackList);
+	m_findCluster = findCluster;
+    }
+
+    protected Map<String,String> m_mapInputNameToMatchedOutputName = new HashMap<String,String>();
+    protected Map<String,String> m_mapInputNameToUnmatchedOutputName = new HashMap<String,String>();
+    public void addInputList(String inputName, String matchedOutputName, String unmatchedOutputName) {
+	m_mapInputNameToMatchedOutputName.put(inputName, matchedOutputName);
+	m_mapInputNameToUnmatchedOutputName.put(inputName, unmatchedOutputName);
+    }
+
+    protected Map<Track,Cluster> makeMap(EventHeader event) {
+	// Read in inputs
+	List<Track> trackList = event.get(Track.class, m_inputTrackListName);
+	Map<String, List<Cluster>> inputLists = new HashMap<String, List<Cluster>>();
+	for (String str : m_mapInputNameToMatchedOutputName.keySet()) {
+	    List<Cluster> currentList = event.get(Cluster.class, str);
+	    inputLists.put(str, currentList);
+	}
+	if (inputLists.size() != m_mapInputNameToMatchedOutputName.size()) { throw new AssertionError("Book-keeping error"); }
+	if (inputLists.size() != m_mapInputNameToUnmatchedOutputName.size()) { throw new AssertionError("Book-keeping error"); }
+
+	// Set up matching
+	LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher(m_findCluster);
+	LocalHelixExtrapolationTrackClusterMatcher genMatch = new LocalHelixExtrapolationTrackClusterMatcher(m_findCluster);
+	DualActionTrackClusterMatcher dualMatch = new DualActionTrackClusterMatcher(mipMatch, genMatch);
+	mipMatch.process(event);
+	genMatch.process(event);
+	List<Cluster> allMatchableClusters = new Vector<Cluster>();
+	for (List<Cluster> inputList : inputLists.values()) {
+	    allMatchableClusters.addAll(inputList);
+	}
+
+	// Do matching
+	Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
+	Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
+	for (Track tr : trackList) {
+	    Cluster matchedCluster = dualMatch.matchTrackToCluster(tr, allMatchableClusters);
+	    if (matchedCluster != null) {
+		// Found a match
+		// Optionally, handle these cases:
+		//   * Match is to a teeny cluster piece (leftoverHitClusters) but there is structure nearby inside same DTree
+		//   * Match is to a photon (try to split up)
+		//   * Match is to a cluster with E>>p (try to split up)
+		// ... but those don't really apply here (they aren't MIPs)
+		tracksMatchedToClusters.put(tr, matchedCluster);
+		List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
+		if (clusTrList == null) { 
+		    clusTrList = new Vector<Track>(); 
+		    clustersMatchedToTracks.put(matchedCluster, clusTrList); 
+		}
+		clusTrList.add(tr);
+	    }
+	}
+
+	// Flag unique matches to be written out:
+	Map<Track,Cluster> outputMap = new HashMap<Track,Cluster>();
+	for (Track tr : tracksMatchedToClusters.keySet()) {
+	    Cluster matchedClus = tracksMatchedToClusters.get(tr);
+	    List<Track> tracksOfMatchedClus = clustersMatchedToTracks.get(matchedClus);
+	    if (tracksOfMatchedClus == null) { throw new AssertionError("Book-keeping error!"); }
+	    if (tracksOfMatchedClus.size()==0) {
+		throw new AssertionError("Book-keeping error!");
+	    } else if (tracksOfMatchedClus.size()==1) {
+		// Unique match -- OK
+		outputMap.put(tr, matchedClus);
+	    } else {
+		// Ambiguous match -- ignore for now
+	    }
+	}
+
+	// Identify unmatched tracks
+	List<Track> unmatchedTracks = new Vector<Track>();
+	unmatchedTracks.addAll(trackList);
+	unmatchedTracks.removeAll(outputMap.keySet());
+
+	// Separate out lists of matched & unmatched clusters
+	for (String str : m_mapInputNameToMatchedOutputName.keySet()) {
+	    List<Cluster> inputList = inputLists.get(str);
+	    List<Cluster> outputListMatched = new Vector<Cluster>();
+	    List<Cluster> outputListUnmatched = new Vector<Cluster>();
+	    String matchedOutputName = m_mapInputNameToMatchedOutputName.get(str);
+	    String unmatchedOutputName = m_mapInputNameToUnmatchedOutputName.get(str);
+	    for (Cluster clus : inputList) {
+		if (outputMap.values().contains(clus)) {
+		    outputListMatched.add(clus);
+		} else {
+		    outputListUnmatched.add(clus);
+		}
+	    }
+	    event.put(matchedOutputName, outputListMatched);
+	    event.put(unmatchedOutputName, outputListUnmatched);
+	}
+
+	// All done
+	event.put(m_outputUnmatchedTrackListName, unmatchedTracks);
+	return outputMap;
+    }
+}
+

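A wiring sketch for the driver above (all list names are illustrative; each
registered input list is split into matched/unmatched output lists, and only
uniquely matched tracks land in the output map):

    TrackToGenericClusterMapMaker matcher = new TrackToGenericClusterMapMaker(
            extrapolator, "InputTracks", "TrackToClusterMap", "UnmatchedTracks");
    matcher.addInputList("Clumps",       "MatchedClumps",       "UnmatchedClumps");
    matcher.addInputList("LeftoverBits", "MatchedLeftoverBits", "UnmatchedLeftoverBits");
    add(matcher); // inside a parent Driver's constructor
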
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackToMipClusterMapMaker.java added at 1.1
diff -N TrackToMipClusterMapMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackToMipClusterMapMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,102 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*; 
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
+
+public class TrackToMipClusterMapMaker extends TrackToClusterMapMaker {
+    protected HelixExtrapolator m_findCluster;
+    public TrackToMipClusterMapMaker(HelixExtrapolator findCluster, String inputTrackList, String outputMap, String outputUnmatchedTrackList) {
+	super(inputTrackList, outputMap, outputUnmatchedTrackList);
+	m_findCluster = findCluster;
+    }
+
+    protected Map<String,String> m_mapInputNameToMatchedOutputName = new HashMap<String,String>();
+    protected Map<String,String> m_mapInputNameToUnmatchedOutputName = new HashMap<String,String>();
+    public void addInputList(String inputName, String matchedOutputName, String unmatchedOutputName) {
+	m_mapInputNameToMatchedOutputName.put(inputName, matchedOutputName);
+	m_mapInputNameToUnmatchedOutputName.put(inputName, unmatchedOutputName);
+    }
+
+    protected Map<Track,Cluster> makeMap(EventHeader event) {
+	// Read in inputs
+	List<Track> trackList = event.get(Track.class, m_inputTrackListName);
+	Map<String, List<Cluster>> inputLists = new HashMap<String, List<Cluster>>();
+	for (String str : m_mapInputNameToMatchedOutputName.keySet()) {
+	    List<Cluster> currentList = event.get(Cluster.class, str);
+	    inputLists.put(str, currentList);
+	}
+	if (inputLists.size() != m_mapInputNameToMatchedOutputName.size()) { throw new AssertionError("Book-keeping error"); }
+	if (inputLists.size() != m_mapInputNameToUnmatchedOutputName.size()) { throw new AssertionError("Book-keeping error"); }
+
+	// Set up matching
+	LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher(m_findCluster);
+	mipMatch.process(event);
+	List<Cluster> allMatchableClusters = new Vector<Cluster>();
+	for (List<Cluster> inputList : inputLists.values()) {
+	    allMatchableClusters.addAll(inputList);
+	}
+
+	// Do matching
+	Map<Track,Cluster> tracksMatchedToClusters = new HashMap<Track,Cluster>();
+	Map<Cluster, List<Track>> clustersMatchedToTracks = new HashMap<Cluster, List<Track>>();
+	for (Track tr : trackList) {
+	    Cluster matchedCluster = mipMatch.matchTrackToCluster(tr, allMatchableClusters);
+	    if (matchedCluster != null) {
+		// Found a match
+		tracksMatchedToClusters.put(tr, matchedCluster);
+		List<Track> clusTrList = clustersMatchedToTracks.get(matchedCluster);
+		if (clusTrList == null) { 
+		    clusTrList = new Vector<Track>(); 
+		    clustersMatchedToTracks.put(matchedCluster, clusTrList); 
+		}
+		clusTrList.add(tr);
+	    }
+	}
+
+	// Flag unique matches to be written out:
+	Map<Track,Cluster> outputMap = new HashMap<Track,Cluster>();
+	for (Track tr : tracksMatchedToClusters.keySet()) {
+	    Cluster matchedClus = tracksMatchedToClusters.get(tr);
+	    List<Track> tracksOfMatchedClus = clustersMatchedToTracks.get(matchedClus);
+	    if (tracksOfMatchedClus == null) { throw new AssertionError("Book-keeping error!"); }
+	    if (tracksOfMatchedClus.size()==0) {
+		throw new AssertionError("Book-keeping error!");
+	    } else if (tracksOfMatchedClus.size()==1) {
+		// Unique match -- OK
+		outputMap.put(tr, matchedClus);
+	    } else {
+		// Ambiguous match -- ignore for now
+	    }
+	}
+
+	// Identify unmatched tracks
+	List<Track> unmatchedTracks = new Vector<Track>();
+	unmatchedTracks.addAll(trackList);
+	unmatchedTracks.removeAll(outputMap.keySet());
+
+	// Separate out lists of matched & unmatched clusters
+	for (String str : m_mapInputNameToMatchedOutputName.keySet()) {
+	    List<Cluster> inputList = inputLists.get(str);
+	    List<Cluster> outputListMatched = new Vector<Cluster>();
+	    List<Cluster> outputListUnmatched = new Vector<Cluster>();
+	    String matchedOutputName = m_mapInputNameToMatchedOutputName.get(str);
+	    String unmatchedOutputName = m_mapInputNameToUnmatchedOutputName.get(str);
+	    for (Cluster clus : inputList) {
+		if (outputMap.values().contains(clus)) {
+		    outputListMatched.add(clus);
+		} else {
+		    outputListUnmatched.add(clus);
+		}
+	    }
+	    event.put(matchedOutputName, outputListMatched);
+	    event.put(unmatchedOutputName, outputListUnmatched);
+	}
+
+	// All done
+	event.put(m_outputUnmatchedTrackListName, unmatchedTracks);
+	return outputMap;
+    }
+}

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/identifier
TrackToPreShowerMipMapMaker.java added at 1.1
diff -N TrackToPreShowerMipMapMaker.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ TrackToPreShowerMipMapMaker.java	11 Apr 2012 15:49:35 -0000	1.1
@@ -0,0 +1,159 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier;
+
+import java.util.*; 
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.*;
+import org.lcsim.recon.cluster.util.BasicCluster;
+
+public class TrackToPreShowerMipMapMaker extends TrackToClusterMapMaker {
+
+    String m_inputMapTrkToMIP;
+    String m_outputMips;
+    String m_outputSmallClusters;
+    String m_outputBlocks;
+    public TrackToPreShowerMipMapMaker(String inputMapTrkToMIP, String inputTrackList, String outputMap, String outputUnmatchedTrackList, String outputMips, String outputSmallClusters, String outputBlocks) {
+	super(inputTrackList, outputMap, outputUnmatchedTrackList);
+	m_inputMapTrkToMIP = inputMapTrkToMIP;
+	m_outputMips = outputMips;
+	m_outputSmallClusters = outputSmallClusters;
+	m_outputBlocks = outputBlocks;
+    }
+
+    protected Map<Track,Cluster> makeMap(EventHeader event) {
+	// The output
+	Map<Track,Cluster> outputMap = new HashMap<Track,Cluster>();
+	List<Cluster> outputClustersMips = new Vector<Cluster>();
+	List<Cluster> outputClustersBlocks = new Vector<Cluster>();
+	List<Cluster> outputClustersSmall = new Vector<Cluster>();
+
+	// Read in MIP connections
+	List<Track> inputTrackList = event.get(Track.class, m_inputTrackListName);
+	Map<Track,BasicCluster> mapTrkToMIP = (Map<Track,BasicCluster>)(event.get(m_inputMapTrkToMIP));
+	
+	// Now, each track is connected to a MIP. But some of these MIP clusters
+	// may overlap. We need to identify the cases when that happens and then
+	//   * produce a merged cluster
+	//   * have each of the tracks pointing to the same merged cluster
+	// First, check for overlaps...
+	Map<Cluster,Track> mapMipToTrack = new HashMap<Cluster,Track>();
+	Map<Cluster,Cluster> mapMipToMergedCluster = new HashMap<Cluster,Cluster>();
+	Map<Cluster,List<Track>> mapMergedClusterToTracks = new HashMap<Cluster,List<Track>>();
+
+	// Find hits for each MIP & which clusters they're inside
+	Map<CalorimeterHit,Set<Cluster>> hitMipMap = new HashMap<CalorimeterHit,Set<Cluster>>();
+	for (Track tr : mapTrkToMIP.keySet()) {
+	    if (!inputTrackList.contains(tr)) { throw new AssertionError("Book-keeping error"); }
+	    BasicCluster mip = mapTrkToMIP.get(tr);
+	    mapMipToTrack.put(mip,tr);
+	    for (CalorimeterHit hit : mip.getCalorimeterHits()) {
+		Set<Cluster> mipsOfHit = hitMipMap.get(hit);
+		if (mipsOfHit == null) {
+		    mipsOfHit= new HashSet<Cluster>();
+		    hitMipMap.put(hit, mipsOfHit);
+		}
+		mipsOfHit.add(mip);
+	    }
+	}
+
+	// Look for groups of mips such that
+	//  * Every MIP in a group is connected (directly or indirectly)
+	//    to every other MIP in the group.
+	//  * Every MIP appears in exactly one group.
+	List<Set<Cluster>> mipOverlapSets = new Vector<Set<Cluster>>();
+	for (CalorimeterHit hit : hitMipMap.keySet()) {
+	    Set<Cluster> touchedClusters = hitMipMap.get(hit);
+	    Set<Set<Cluster>> oldLinkedClusterSets = new HashSet<Set<Cluster>>();
+	    for (Cluster clus : touchedClusters) {
+		for (Set<Cluster> currentSet : mipOverlapSets) {
+		    if (currentSet.contains(clus)) {
+			oldLinkedClusterSets.add(currentSet);
+		    }
+		}
+	    }
+	    Set<Cluster> newLinkedClusterSet = new HashSet<Cluster>();
+	    newLinkedClusterSet.addAll(touchedClusters);
+	    for (Set<Cluster> oldSet : oldLinkedClusterSets) {
+		newLinkedClusterSet.addAll(oldSet);
+		mipOverlapSets.remove(oldSet);
+	    }
+	    mipOverlapSets.add(newLinkedClusterSet);
+	}
+
+	// Verify that each cluster appears in exactly one set
+	List<Cluster> countedClusterList = new Vector<Cluster>();
+	Set<Cluster> countedClusterSet = new HashSet<Cluster>();
+	for (Set<Cluster> currentSet : mipOverlapSets) {
+	    countedClusterList.addAll(currentSet);
+	    countedClusterSet.addAll(currentSet);
+	}
+	if (countedClusterList.size() != mapTrkToMIP.size()) { throw new AssertionError("Book-keeping error"); }
+	if (countedClusterSet.size() != mapTrkToMIP.size()) { throw new AssertionError("Book-keeping error"); }
+
+	// Do the merge of overlapping MIPs
+	for (Set<Cluster> currentSet : mipOverlapSets) {
+	    if (currentSet.size()==0) {
+		throw new AssertionError("Empty set!");
+	    } else if (currentSet.size()==1) {
+		Cluster mip = currentSet.iterator().next();
+		mapMipToMergedCluster.put(mip,mip);
+		Track tr = mapMipToTrack.get(mip);
+		List<Track> mergedTracks = new Vector<Track>();
+		mergedTracks.add(tr);
+		mapMergedClusterToTracks.put(mip, mergedTracks);
+	    } else {
+		BasicCluster mergedMip = new BasicCluster();
+		List<Track> mergedTracks = new Vector<Track>();
+		Set<CalorimeterHit> mergedHits = new HashSet<CalorimeterHit>();
+		for (Cluster mip : currentSet) {
+		    mergedHits.addAll(mip.getCalorimeterHits());
+		    Track tr = mapMipToTrack.get(mip);
+		    mergedTracks.add(tr);
+		}
+		for (CalorimeterHit hit : mergedHits) {
+		    mergedMip.addHit(hit);
+		}
+		for (Cluster clus : currentSet) {
+		    mapMipToMergedCluster.put(clus, mergedMip);
+		    mapMergedClusterToTracks.put(mergedMip, mergedTracks);
+		}
+	    }
+	}
+
+	// Assign MIPs to tracks, taking overlaps into account
+	for (Cluster mergedMip : mapMergedClusterToTracks.keySet()) {
+	    List<Track> tracks = mapMergedClusterToTracks.get(mergedMip);
+	    if (tracks == null) { throw new AssertionError("Null tracks!"); }
+	    if (tracks.size()==0) { 
+		throw new AssertionError("Empty track list!"); 
+	    } else if (tracks.size()==1) {
+		// Unique
+		Track tr = tracks.get(0);
+		if (mergedMip.getCalorimeterHits().size() > 5) {
+		    // Found a good MIP
+		    outputMap.put(tr, mergedMip);
+		    outputClustersMips.add(mergedMip);
+		} else {
+		    // Didn't find a good mip
+		    outputClustersSmall.add(mergedMip);
+		}
+	    } else {
+		// Overlap -- can't treat it as a MIP.
+		outputClustersBlocks.add(mergedMip);
+	    }
+	}
+
+	// Identify unmatched tracks
+	List<Track> unmatchedTracks = new Vector<Track>();
+	unmatchedTracks.addAll(inputTrackList);
+	unmatchedTracks.removeAll(outputMap.keySet());
+
+	// All done
+	int flag = 1<<org.lcsim.util.lcio.LCIOConstants.CLBIT_HITS;
+	event.put(m_outputMips, outputClustersMips, Cluster.class, flag);
+	event.put(m_outputBlocks, outputClustersBlocks, Cluster.class, flag);
+	event.put(m_outputSmallClusters, outputClustersSmall, Cluster.class, flag);
+	event.put(m_outputUnmatchedTrackListName, unmatchedTracks);
+	return outputMap;
+    }
+}
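
The MIP-merging block above is an incremental connected-components computation: MIP clusters that share at least one CalorimeterHit end up in the same overlap set, and existing sets are unioned whenever a hit touches clusters from several of them. A minimal, self-contained sketch of the same idiom, assuming nothing from lcsim (the SharedKeyGrouper class and the String stand-ins for CalorimeterHit and Cluster are hypothetical):

    import java.util.*;

    // Sketch: group items (stand-ins for MIP clusters) that share a key
    // (stand-in for a CalorimeterHit) into connected components, using the
    // same grow-and-union loop as the MIP overlap search above.
    public class SharedKeyGrouper {
        public static <K, V> List<Set<V>> group(Map<K, Set<V>> keyToItems) {
            List<Set<V>> groups = new ArrayList<Set<V>>();
            for (Set<V> touched : keyToItems.values()) {
                Set<V> merged = new HashSet<V>(touched);
                // Union every existing group that overlaps this key's items...
                Iterator<Set<V>> it = groups.iterator();
                while (it.hasNext()) {
                    Set<V> g = it.next();
                    if (!Collections.disjoint(g, touched)) {
                        merged.addAll(g);
                        it.remove();
                    }
                }
                // ...and replace them by the merged set.
                groups.add(merged);
            }
            return groups;
        }

        public static void main(String[] args) {
            Map<String, Set<String>> hitToMips = new HashMap<String, Set<String>>();
            hitToMips.put("hit1", new HashSet<String>(Arrays.asList("mipA", "mipB")));
            hitToMips.put("hit2", new HashSet<String>(Arrays.asList("mipB", "mipC")));
            hitToMips.put("hit3", new HashSet<String>(Arrays.asList("mipD")));
            // Two groups: {mipA, mipB, mipC} (linked through mipB) and {mipD}.
            System.out.println(group(hitToMips));
        }
    }

As in the production loop, each MIP ends up in exactly one group, which is what the book-keeping assertions following the merge verify.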

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PFADetectorLayer.java added at 1.1
diff -N PFADetectorLayer.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ PFADetectorLayer.java	11 Apr 2012 15:49:36 -0000	1.1
@@ -0,0 +1,376 @@
+
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+import org.lcsim.geometry.*;
+
+public class PFADetectorLayer
+{
+    protected int m_id;
+    protected int m_layerNumber;
+    protected CalorimeterType m_type;
+
+    /**
+     * Default constructor: instantiate an invalid layer
+     */
+    public PFADetectorLayer()
+    {
+	m_id = -1;
+	m_type = CalorimeterType.UNKNOWN;
+    }
+
+    public PFADetectorLayer( int id , int layerNumber )
+    {
+	m_id = id;
+
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	int n1 = ci.getNLayers( CalorimeterType.EM_BARREL );
+	int n2 = ci.getNLayers( CalorimeterType.HAD_BARREL );
+	int n3 = ci.getNLayers( CalorimeterType.MUON_BARREL );
+	int n4 = ci.getNLayers( CalorimeterType.EM_ENDCAP );
+	int n5 = ci.getNLayers( CalorimeterType.HAD_ENDCAP );
+	int n6 = ci.getNLayers( CalorimeterType.MUON_ENDCAP );
+	if     ( id < n1 )                m_type = CalorimeterType.EM_BARREL;
+	else if( id < n1+n2 )             m_type = CalorimeterType.HAD_BARREL;
+	else if( id < n1+n2+n3 )          m_type = CalorimeterType.MUON_BARREL;
+	else if( id < n1+n2+n3+n4 )       m_type = CalorimeterType.EM_ENDCAP;
+	else if( id < n1+n2+n3+n4+n5 )    m_type = CalorimeterType.HAD_ENDCAP;
+	else if( id < n1+n2+n3+n4+n5+n6 ) m_type = CalorimeterType.MUON_ENDCAP;
+	else throw new AssertionError();
+
+	// Derive the local layer number from the id rather than trusting the
+	// layerNumber argument; getNextLayer() passes a dummy value here.
+	m_layerNumber = id - getSubDetectorOffset( m_type );
+    }
+
+    /**
+     * Constructor taking a CalorimeterHit
+     */
+    public PFADetectorLayer( CalorimeterHit hit )
+    {
+	IDDecoder idDecoder = hit.getIDDecoder();
+	idDecoder.setID( hit.getCellID() );
+	int layer = idDecoder.getLayer();
+
+	String calorimeterName = hit.getSubdetector().getName();
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+
+	// calculate the layer identifier:
+	// should preserve the order: inside-out, barrel-endcap
+	if(      calorimeterName.equals( ci.getName( CalorimeterType.EM_BARREL   ) ) ) { m_type = CalorimeterType.EM_BARREL;   m_id = layer + getSubDetectorOffset( m_type ); m_layerNumber = layer; }
+	else if( calorimeterName.equals( ci.getName( CalorimeterType.HAD_BARREL  ) ) ) { m_type = CalorimeterType.HAD_BARREL;  m_id = layer + getSubDetectorOffset( m_type ); m_layerNumber = layer; }
+	else if( calorimeterName.equals( ci.getName( CalorimeterType.MUON_BARREL ) ) ) { m_type = CalorimeterType.MUON_BARREL; m_id = layer + getSubDetectorOffset( m_type ); m_layerNumber = layer; }
+	else if( calorimeterName.equals( ci.getName( CalorimeterType.EM_ENDCAP   ) ) ) { m_type = CalorimeterType.EM_ENDCAP;   m_id = layer + getSubDetectorOffset( m_type ); m_layerNumber = layer; }
+	else if( calorimeterName.equals( ci.getName( CalorimeterType.HAD_ENDCAP  ) ) ) { m_type = CalorimeterType.HAD_ENDCAP;  m_id = layer + getSubDetectorOffset( m_type ); m_layerNumber = layer; }
+	else if( calorimeterName.equals( ci.getName( CalorimeterType.MUON_ENDCAP ) ) ) { m_type = CalorimeterType.MUON_ENDCAP; m_id = layer + getSubDetectorOffset( m_type ); m_layerNumber = layer; }
+	else throw new AssertionError();
+    }
+
+
+    public int getSubDetectorOffset( CalorimeterType type )
+    {
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+
+	if(      type == CalorimeterType.EM_BARREL )   return 0;
+	else if( type == CalorimeterType.HAD_BARREL )  return ci.getNLayers( CalorimeterType.EM_BARREL );
+	else if( type == CalorimeterType.MUON_BARREL ) return getSubDetectorOffset( CalorimeterType.HAD_BARREL )  + ci.getNLayers( CalorimeterType.HAD_BARREL );
+	else if( type == CalorimeterType.EM_ENDCAP )   return getSubDetectorOffset( CalorimeterType.MUON_BARREL ) + ci.getNLayers( CalorimeterType.MUON_BARREL );
+	else if( type == CalorimeterType.HAD_ENDCAP )  return getSubDetectorOffset( CalorimeterType.EM_ENDCAP )   + ci.getNLayers( CalorimeterType.EM_ENDCAP );
+	else if( type == CalorimeterType.MUON_ENDCAP ) return getSubDetectorOffset( CalorimeterType.HAD_ENDCAP )  + ci.getNLayers( CalorimeterType.HAD_ENDCAP );
+	else throw new AssertionError();
+    }
+
+    public int getLayerNumber()
+    {
+	return m_layerNumber;
+    }
+
+    /**
+     * returns the identifier of the layer
+     */
+    public int id()
+    {
+	return m_id;
+    }
+
+
+    /**
+     * returns the calorimeter type
+     */
+    public CalorimeterType calorimeterType()
+    {
+	return m_type;
+    }
+
+
+    /**
+     * returns the sub-detector name
+     */
+    public String subdetectorName()
+    {
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	return ci.getName(m_type);
+    }
+
+
+    /**
+     * Override hashCode so that the layer identifier serves as the key in hash-based maps.
+     */
+    public int hashCode()
+    {
+	return id();
+    }
+
+
+    /**
+     * Override the equals method to use identifiers to compare two layers.
+     */
+    public boolean equals( Object obj )
+    {
+	if( !(obj instanceof PFADetectorLayer) ) return false;
+	PFADetectorLayer layer = (PFADetectorLayer)obj;
+	return id() == layer.id();
+    }
+
+
+    /**
+     * Checks validity of the layer as representing a physical layer
+     */
+    public boolean isValid()
+    {
+	// checks for valid identifier and sub-detector type
+	return ( m_id >= 0 && m_type != CalorimeterType.UNKNOWN );
+    }
+
+
+    /**
+     * Returns the number of layers skipped between this layer and a given layer.
+     * The returned value is 0 if the layers are the same, and negative if the
+     * layers are not passed in the normal order (see LayerSort).
+     */
+    public int getSkipped( PFADetectorLayer layer )
+    {
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	if( isBarrel() && layer.isEndcap() ) return -10000;
+	if( isEndcap() && layer.isBarrel() ) return -10000;
+	int first, second;
+	first = 10000;
+	second = 10000;
+	if( isBarrel() )
+	    {
+		if( m_type == CalorimeterType.EM_BARREL )        first = getLayerNumber();
+		else if( m_type == CalorimeterType.HAD_BARREL )  first = ci.getNLayers( CalorimeterType.EM_BARREL ) + getLayerNumber();
+		else if( m_type == CalorimeterType.MUON_BARREL ) first = ci.getNLayers( CalorimeterType.EM_BARREL ) + ci.getNLayers( CalorimeterType.HAD_BARREL ) + getLayerNumber();
+
+		if( layer.calorimeterType() == CalorimeterType.EM_BARREL )        second = layer.getLayerNumber();
+		else if( layer.calorimeterType() == CalorimeterType.HAD_BARREL )  second = ci.getNLayers( CalorimeterType.EM_BARREL ) + layer.getLayerNumber();
+		else if( layer.calorimeterType() == CalorimeterType.MUON_BARREL ) second = ci.getNLayers( CalorimeterType.EM_BARREL ) + ci.getNLayers( CalorimeterType.HAD_BARREL ) + layer.getLayerNumber();
+	    }
+	else
+	    {
+		if( m_type == CalorimeterType.EM_ENDCAP )        first = getLayerNumber();
+		else if( m_type == CalorimeterType.HAD_ENDCAP )  first = ci.getNLayers( CalorimeterType.EM_ENDCAP ) + getLayerNumber();
+		else if( m_type == CalorimeterType.MUON_ENDCAP ) first = ci.getNLayers( CalorimeterType.EM_ENDCAP ) + ci.getNLayers( CalorimeterType.HAD_ENDCAP ) + getLayerNumber();
+
+		if( layer.calorimeterType() == CalorimeterType.EM_ENDCAP )        second = layer.getLayerNumber();
+		else if( layer.calorimeterType() == CalorimeterType.HAD_ENDCAP )  second = ci.getNLayers( CalorimeterType.EM_ENDCAP ) + layer.getLayerNumber();
+		else if( layer.calorimeterType() == CalorimeterType.MUON_ENDCAP ) second = ci.getNLayers( CalorimeterType.EM_ENDCAP ) + ci.getNLayers( CalorimeterType.HAD_ENDCAP ) + layer.getLayerNumber();		
+	    }
+	return ( first - second );
+    }
+
+    public double getCellSize() // TODO: fix hard-coded cell sizes
+    {
+	if(      m_type == CalorimeterType.EM_BARREL )   return 3.5;
+	else if( m_type == CalorimeterType.HAD_BARREL )  return 10.;
+	else if( m_type == CalorimeterType.MUON_BARREL ) return 1000000.; // TODO: fix
+	else if( m_type == CalorimeterType.EM_ENDCAP )   return 3.5;
+	else if( m_type == CalorimeterType.HAD_ENDCAP )  return 10.;
+	else if( m_type == CalorimeterType.MUON_ENDCAP ) return 1000000.; // TODO: fix
+	else return 1000000.;
+    }
+
+    public double getDistanceToIP()
+    {
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	if(      m_type == CalorimeterType.EM_BARREL )
+	    {
+		if     ( getLayerNumber() <= 20 ) return ci.getRMin( CalorimeterType.EM_BARREL ) + ( getLayerNumber() * 3.5 ) + 0.16;
+		else if( getLayerNumber() <= 30 ) return ci.getRMin( CalorimeterType.EM_BARREL ) + ( 3.5 * 20. ) + ( (getLayerNumber() - 20) * 6. ) + 0.16;
+		else throw new AssertionError();
+	    }
+	else if( m_type == CalorimeterType.HAD_BARREL )  return ci.getRMin( CalorimeterType.HAD_BARREL ) + ( getLayerNumber() * 28. ) + 21.7;
+	else if( m_type == CalorimeterType.MUON_BARREL ) return ci.getRMin( CalorimeterType.MUON_BARREL ) + getLayerNumber() * ( ci.getRMax( CalorimeterType.MUON_BARREL ) - ci.getRMin( CalorimeterType.MUON_BARREL ) ) / ci.getNLayers( CalorimeterType.MUON_BARREL );
+	else if( m_type == CalorimeterType.EM_ENDCAP )
+	    {
+		if     ( getLayerNumber() <= 20 ) return ci.getZMin( CalorimeterType.EM_ENDCAP ) + ( getLayerNumber() * 3.5 ) + 0.16;
+		else if( getLayerNumber() <= 30 ) return ci.getZMin( CalorimeterType.EM_ENDCAP ) + ( 3.5 * 20. ) + ( ( getLayerNumber() - 20 ) * 6. ) + 0.16;
+		else throw new AssertionError();
+	    }
+	else if( m_type == CalorimeterType.HAD_ENDCAP )  return ci.getZMin( CalorimeterType.HAD_ENDCAP ) + ( getLayerNumber() * 28. ) + 21.7;
+	else if( m_type == CalorimeterType.MUON_ENDCAP ) return ci.getZMin( CalorimeterType.MUON_ENDCAP ) + getLayerNumber() * ( ci.getZMax( CalorimeterType.MUON_ENDCAP ) - ci.getZMin( CalorimeterType.MUON_ENDCAP ) ) / ci.getNLayers( CalorimeterType.MUON_ENDCAP );
+	else throw new AssertionError();
+	//if( m_id < 20 ) return 3.5;
+	//else if( 20 <= baseLayer && baseLayer < 30 ) return 6.;
+	//else if( baseLayer == 30 ) return 31.54;
+	//else return 28.;	
+    }
+
+    public double getDistanceToLayer( PFADetectorLayer layer ) // TODO: fix
+    {
+	if( isBarrel() && layer.isEndcap() ) return -1000000.;
+	if( isEndcap() && layer.isBarrel() ) return -1000000.;
+	double d = getDistanceToIP() - layer.getDistanceToIP();
+	if( d < 0. ) d *= -1.;
+	return d;
+    }
+
+    public double getDistanceToNextLayer()
+    {
+	PFADetectorLayer nextLayer = getNextLayer();
+	if( nextLayer == null ) throw new AssertionError();
+	return nextLayer.getDistanceToIP() - getDistanceToIP();
+    }
+
+    public PFADetectorLayer getNextLayer()
+    {
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	int nBarrel = ci.getNLayers( CalorimeterType.EM_BARREL ) + ci.getNLayers( CalorimeterType.HAD_BARREL ) + ci.getNLayers( CalorimeterType.MUON_BARREL );
+	int nEndcap = ci.getNLayers( CalorimeterType.EM_ENDCAP ) + ci.getNLayers( CalorimeterType.HAD_ENDCAP ) + ci.getNLayers( CalorimeterType.MUON_ENDCAP );
+	if( isBarrel() && id() == nBarrel - 1 ) return null;
+	// endcap ids include the barrel offset (see getSubDetectorOffset)
+	if( isEndcap() && id() == nBarrel + nEndcap - 1 ) return null;
+	return new PFADetectorLayer( id() + 1 , 0 ); // local layer number is re-derived from the id
+    }
+
+    public double getDistanceBetweenLayers( PFADetectorLayer layer1 , PFADetectorLayer layer2 )
+    {
+	if( layer1.isBarrel() && !layer2.isBarrel() ) throw new AssertionError();
+	if( layer1.isEndcap() && !layer2.isEndcap() ) throw new AssertionError();
+	double d = layer1.getDistanceToIP() - layer2.getDistanceToIP();
+	if( d < 0 ) d *= -1.;
+	return d;
+    }
+
+    /**
+     * Returns true if the layer is in the barrel
+     */
+    public boolean isBarrel()
+    {
+	if( m_type == CalorimeterType.EM_BARREL   ) return true;
+	if( m_type == CalorimeterType.HAD_BARREL  ) return true;
+	if( m_type == CalorimeterType.MUON_BARREL ) return true;
+	return false;
+    }
+
+
+    /**
+     * Returns true if the layer is in the endcap
+     */
+    public boolean isEndcap()
+    {
+	if( m_type == CalorimeterType.EM_ENDCAP   ) return true;
+	if( m_type == CalorimeterType.HAD_ENDCAP  ) return true;
+	if( m_type == CalorimeterType.MUON_ENDCAP ) return true;
+	return false;
+    }
+
+
+    public Hep3Vector getInterceptPoint( Hep3Vector P , Hep3Vector v )
+    {
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	if( isEndcap() )
+	    {
+
+		double signOfZ = P.z() < 0. ? -1. : +1.;
+		Hep3Vector n = new BasicHep3Vector( 0 , 0 , 1 ); // normal vector of the plane
+		Hep3Vector A = new BasicHep3Vector( 0 , 0 , signOfZ * getDistanceToIP() ); // a point on the plane
+		
+		double x,y,z;
+		x = (A.x()*n.x()) - (P.y()*n.y()) - (P.z()*n.z()) + ( ( (v.y()*n.y()) + (v.z()*n.z()) ) * (P.x()/v.x()) ) + (A.y()*n.y()) + (A.z()*n.z());
+		x /= ( n.x() + ( ( (v.y()*n.y()) + (v.z()*n.z()) ) / v.x() ) );
+		y = (A.y()*n.y()) - (P.x()*n.x()) - (P.z()*n.z()) + ( ( (v.x()*n.x()) + (v.z()*n.z()) ) * (P.y()/v.y()) ) + (A.x()*n.x()) + (A.z()*n.z());
+		y /= ( n.y() + ( ( (v.x()*n.x()) + (v.z()*n.z()) ) / v.y() ) );
+		z = (A.z()*n.z()) - (P.x()*n.x()) - (P.y()*n.y()) + ( ( (v.x()*n.x()) + (v.y()*n.y()) ) * (P.z()/v.z()) ) + (A.x()*n.x()) + (A.y()*n.y());
+		z /= ( n.z() + ( ( (v.x()*n.x()) + (v.y()*n.y()) ) / v.z() ) );
+		
+		double r = Math.sqrt( x*x + y*y );
+		double rmin = ci.getRMin( m_type );
+		double rmax = ci.getRMax( m_type );
+		if( rmin <= r && r <= rmax ) return new BasicHep3Vector( x , y , z );
+		else return null;
+	    }
+	else if( isBarrel() )
+	    {
+		double a,b,c,m,delta;
+		double x1,x2,y1,y2,z1,z2;
+		double R = getDistanceToIP(); // radius of the layer
+		
+		m = v.y() / v.x();
+		a = 1. + m*m;
+		b = ( 2. * m ) * ( P.y() - (m*P.x()) );
+		c = (m*m)*(P.x()*P.x()) - (2.*m*P.x()*P.y()) + (P.y()*P.y())- (R*R);
+		delta = (b*b) - (4.*a*c);
+		if( delta > 0. ) { x1 = ( - b - Math.sqrt(delta) ) / (2.*a); x2 = ( - b + Math.sqrt(delta) ) / (2.*a); }
+		else return null;
+		
+		m = v.x() / v.y();
+		a = 1. + m*m;
+		b = ( 2. * m ) * ( P.x() - (m*P.y()) );
+		c = (m*m)*(P.y()*P.y()) - (2.*m*P.y()*P.x()) + (P.x()*P.x())- (R*R);
+		delta = (b*b) - (4.*a*c);
+		if( delta > 0. ) { y1 = ( - b - Math.sqrt(delta) ) / (2.*a); y2 = ( - b + Math.sqrt(delta) ) / (2.*a); }
+		else return null;
+		
+		double cylinder11 = x1*x1 + y1*y1 - R*R;   cylinder11 = cylinder11 * cylinder11;
+		double cylinder12 = x1*x1 + y2*y2 - R*R;   cylinder12 = cylinder12 * cylinder12;
+		double cylinder21 = x2*x2 + y1*y1 - R*R;   cylinder21 = cylinder21 * cylinder21;
+		double cylinder22 = x2*x2 + y2*y2 - R*R;   cylinder22 = cylinder22 * cylinder22;
+		
+		z1 = ( v.z() / v.x() ) * ( x1 - P.x() ) + P.z();
+		z2 = ( v.z() / v.x() ) * ( x2 - P.x() ) + P.z();
+		
+		Vector<Hep3Vector> sol = new Vector<Hep3Vector>();
+		if( cylinder11 < 1.e-6 ) sol.add( new BasicHep3Vector( x1 , y1 , z1 ) );
+		if( cylinder12 < 1.e-6 ) sol.add( new BasicHep3Vector( x1 , y2 , z1 ) );
+		if( cylinder21 < 1.e-6 ) sol.add( new BasicHep3Vector( x2 , y1 , z2 ) );
+		if( cylinder22 < 1.e-6 ) sol.add( new BasicHep3Vector( x2 , y2 , z2 ) );
+		if( sol.size() != 2 ) return null; //throw new AssertionError("found "+sol.size()+" solutions among: "+cylinder11+" "+cylinder12+" "+cylinder21+" "+cylinder22);
+		
+		Hep3Vector M = null;
+		if( VecOp.sub( sol.get(0) , P ).magnitude() < VecOp.sub( sol.get(1) , P ).magnitude() ) M = sol.get(0);
+		else M = sol.get(1);
+
+		double z = M.z();
+		double zmin = -1. * ci.getZMax( m_type );
+		double zmax = +1. * ci.getZMax( m_type );
+		if( zmin <= z && z <= zmax ) return M;
+		else return null;
+	    }
+	else throw new AssertionError();
+    }
+
+
+
+    /**
+     * Comparator ordering layers from inside-out, barrel-endcap
+     */
+    static public class LayerSort implements Comparator<PFADetectorLayer> {
+
+	// constructor
+	public LayerSort() {}
+
+	// compare the identifiers
+	// the identifiers are calculated so as to preserve the order: inside-out, barrel-endcap
+	public int compare(PFADetectorLayer l1, PFADetectorLayer l2) {
+	    if (l1.id() < l2.id()) {
+		return -1;
+	    } else if (l1.id() > l2.id()) {
+		return 1;
+	    } else {
+		return 0;
+	    }
+	}
+    }
+
+
+}
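
PFADetectorLayer flattens (subdetector, local layer) pairs into a single integer id via cumulative offsets, so the inside-out, barrel-before-endcap traversal used by LayerSort reduces to plain integer comparison. A small stand-alone sketch of that indexing scheme, with made-up layer counts (the real code queries CalorimeterInformation at runtime):

    import java.util.Arrays;

    // Sketch of the cumulative-offset layer indexing used by PFADetectorLayer.
    // The layer counts are illustrative placeholders, not detector values.
    public class LayerIndexSketch {
        static final int N_EM = 30, N_HAD = 40; // hypothetical counts

        // Global id = local layer number + offset of the subdetector,
        // mirroring getSubDetectorOffset() in the class above.
        static int globalId(String type, int layer) {
            if (type.equals("EM"))   return layer;
            if (type.equals("HAD"))  return N_EM + layer;
            if (type.equals("MUON")) return N_EM + N_HAD + layer;
            throw new IllegalArgumentException(type);
        }

        public static void main(String[] args) {
            int[] ids = { globalId("HAD", 0), globalId("EM", 12), globalId("MUON", 3) };
            // Sorting the ids reproduces the inside-out order of LayerSort.
            Arrays.sort(ids);
            System.out.println(Arrays.toString(ids)); // [12, 30, 73]
        }
    }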

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PFAWrapper.java added at 1.1
diff -N PFAWrapper.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ PFAWrapper.java	11 Apr 2012 15:49:36 -0000	1.1
@@ -0,0 +1,168 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural;
+
+import java.util.*;
+import org.lcsim.util.Driver;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.loop.LCIODriver;
+import org.lcsim.util.*;
+import org.lcsim.event.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.LikelihoodEvaluatorWrapper;
+import org.lcsim.recon.pfa.output.*;
+import org.lcsim.recon.util.CalInfoDriver;
+
+/**
+ * Example driver to create PFA output with a list of
+ * ReconstructedParticle objects. The output is written
+ * to "test.slcio". In addition, some dijet mass plots
+ * are written to "output-write.aida".
+ * 
+ * Beware that the LCIO output file will be quite large!
+ * Some attempt is made to slim it down, but if space is
+ * an issue you may need to cut out more of the output
+ * (see code in the writeOutMini() subroutine for how this
+ * is done).
+ *
+ * There is no acceptance cut built in, since this depends
+ * on what kind of event you are looking at.
+ */
+
+public class PFAWrapper extends Driver
+{
+
+    // input track collection
+    protected String trackInput = "Tracks";
+    public void setTrackInput(String tracks) { this.trackInput = tracks; }
+
+    // input MC list
+    protected String mcInput = "ReconFSParticles";
+    public void setMcInput(String mcList) { this.mcInput = mcList; }
+
+    // run mode: [0] analysis - [1] training
+    protected int runMode = 0;
+    public void setRunMode(int mode){ this.runMode = mode; }
+
+    // flag to run LOI baseline pfa
+    protected boolean doBaseline = false;
+    public void setDoBaseline(boolean baseline) { doBaseline = baseline; }
+
+    // flag to set number of iterations
+    protected int numberOfShowerBuildingIterations = 3;
+    public void setNumberOfShowerBuildingIterations(int n) { numberOfShowerBuildingIterations = n; }
+
+    protected SetUpPFA pfa;
+    protected EventSelection m_eventSelection;
+
+    /** Constructor sets up daughter drivers. */
+    public PFAWrapper()
+    {
+	// Cache general calorimeter information
+	add(new CalInfoDriver());
+
+	// Prepare to run PFA: digitize calorimeter hits
+	add(new org.lcsim.digisim.DigiPackageDriver());
+
+	// Set up and run PFA
+	pfa = new SetUpPFA();
+	add(pfa);
+
+	// Output
+	add(new FlushReconstructedParticlesDriver("DTreeReclusteredParticles", "FlushedDTreeReclusteredParticles", "FlushedDTreeReclusteredClusters"));
+
+	// event selection
+	m_eventSelection = new EventSelection(0, 0.95);
+	
+	writeOutMini("full.slcio"); // Write out to an LCIO file
+    }
+
+    protected boolean m_init = false;
+    protected void process(EventHeader event){
+
+	if(!m_init){
+	    m_init = true;
+	    
+	    pfa.setTrackList(trackInput);
+	    pfa.setMcList(mcInput);
+	    pfa.setRunMode(runMode);
+	    pfa.doBaseline(doBaseline);
+	    pfa.numberOfIterations(numberOfShowerBuildingIterations);
+	}
+
+	//	if(m_eventSelection.pass(event)){
+	    super.process(event);
+	    //}
+    }
+
+    void writeOutMini(String filename) {
+	// Avoid writing out a lot of things that were not in the original event and
+	//   a) were generated and used by us internally, and will not be needed; or
+	//   b) were generated by someone else.
+	
+	Vector<String> v = new Vector<String>();
+	// Particles
+        v.add("CheatReconstructedParticles");
+	v.add("TempCheatParticles");
+	v.add("GenPerfectReconParticles");
+	v.add("GenPerfectReconParticles");
+	v.add("ReconPerfectReconParticles");
+	v.add("GenPerfectVisReconParticles");
+	v.add("ReconPerfectVisReconParticles");
+	// Tracks & track hits
+        v.add("TkrEndcapHitsCheatTrackerHits");
+        v.add("VtxEndcapHitsCheatTrackerHits");
+        v.add("VtxBarrHitsCheatTrackerHits");
+	v.add("TkrForwardHitsCheatTrackerHits");
+	v.add("TkrBarrHitsCheatTrackerHits");
+        v.add("VtxEndcapHitsCheatTracks");
+        v.add("VtxBarrHitsCheatTracks");
+        v.add("TkrBarrHitsCheatTracks");
+        v.add("TkrForwardHitsCheatTracks");
+        v.add("TkrEndcapHitsCheatTracks");
+	v.add("TempCheatTracks");
+	v.add("PerfectTracks");
+	v.add("RefinedCheatTracks");
+	// CalorimeterHits
+	v.add("EcalBarrRawHits");
+	v.add("ForwardEcalEndcapRawHits");
+	v.add("EcalEndcapRawHits");
+	v.add("HcalBarrRawHits");
+	v.add("HcalEndcapRawHits");
+	v.add("EcalBarrDigiHits");
+	v.add("EcalBarrelDigiHits");
+	v.add("EcalEndcapDigiHits");
+	v.add("HcalBarrDigiHits");
+	v.add("HcalBarrelDigiHits");
+	v.add("HcalEndcapDigiHits");
+	v.add("MuonBarrDigiHits");
+	v.add("MuonBarrelDigiHits");
+	v.add("MuonEndcapDigiHits");
+	// Clusters
+        v.add("EcalEndcapDigiHitsCheatClusters");
+        v.add("EcalBarrDigiHitsCheatClusters");
+        v.add("HcalBarrDigiHitsCheatClusters");
+        v.add("HcalEndcapDigiHitsCheatClusters");
+	v.add("ReDTEcalClusters");
+	v.add("HcalEndcapDigiHitsDTreeClusters");
+	v.add("HcalBarrDigiHitsDTreeClusters");
+	v.add("NonFSReconClusters");
+	v.add("ReconClusters");
+	v.add("EcalBarrDigiHitsDTreeClusters");
+	v.add("EcalEndcapDigiHitsDTreeClusters");
+	v.add("RefinedCheatClusters");
+	v.add("DTEcalClusters");
+	v.add("TMClusters");
+	// Maps
+        v.add("EcalEndcapRaw2sim");
+	v.add("HcalBarrRaw2sim");
+        v.add("HcalEndcapRaw2sim");
+        v.add("EcalBarrRaw2sim");
+	v.add("ForwardEcalEndcapRaw2sim");
+	v.add("TracksToMCP");
+	v.add("CheatTracksToMCP");
+	// Misc
+        v.add("MCParticleEndPointEnergy");
+
+	add(new org.lcsim.util.loop.LCIODriver(filename, v));
+    }
+}
+
+
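
Since PFAWrapper is a self-contained Driver, it can be run directly over an LCIO file with the standard lcsim event loop. A hedged sketch of such a steering program (the input file name and parameter values are placeholders, and the LCSimLoop usage is assumed to follow the usual lcsim pattern rather than anything in this commit):

    import java.io.File;
    import org.lcsim.util.loop.LCSimLoop;
    import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.PFAWrapper;

    public class RunPFAWrapper {
        public static void main(String[] args) throws Exception {
            PFAWrapper pfa = new PFAWrapper();
            pfa.setTrackInput("Tracks");                // default, shown for clarity
            pfa.setRunMode(0);                          // 0 = analysis, 1 = training
            pfa.setDoBaseline(false);
            pfa.setNumberOfShowerBuildingIterations(3);

            LCSimLoop loop = new LCSimLoop();
            loop.setLCIORecordSource(new File("input-events.slcio")); // placeholder
            loop.add(pfa);
            loop.loop(10); // process the first 10 events
        }
    }

Note that the configuration setters take effect before the first event is processed, since PFAWrapper forwards them to its daughter SetUpPFA driver lazily in process().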

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
SlicedShowerBuilder.java added at 1.1
diff -N SlicedShowerBuilder.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ SlicedShowerBuilder.java	11 Apr 2012 15:49:36 -0000	1.1
@@ -0,0 +1,3754 @@
+
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural;
+
+import java.util.*;
+import hep.physics.vec.*;
+import hep.physics.particle.properties.*;
+import org.lcsim.event.*;
+import org.lcsim.event.base.*;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.clumpfinder.kmean.*;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
+import org.lcsim.math.probability.Erf;
+
+public class SlicedShowerBuilder 
+{
+
+    protected PropertyContainer m_properties;
+    protected PFABookKeepingBroker m_bookKeeper;
+
+    protected HelixExtrapolator m_extrapolator = null;
+
+    protected ClusterEnergyCalculator m_chargedCalib;
+    protected ClusterEnergyCalculator m_neutralCalib;
+
+    protected Map<CalorimeterHit, CalorimeterHitType> m_hitTypesMap;
+    protected Map<CalorimeterHit, Cluster> m_hitToRigidClusterMap;
+
+    // algorithm parameters
+    protected double m_minDistanceToMipExtrapolationCut = 2; // in units of cell size...
+    protected double m_mipToMipProximityCut = 2; // in units of cell size...
+    protected double m_figureOfMeritCutForSmallClusterAssociations = 0.999;
+    protected double m_figureOfMeritCutForLargeClusterAssociations = 0.1;
+    protected int m_maxSkippedLayersForCandidateAssociations = 1;
+    protected int m_maxSkippedLayersForMipToMipAssociations = 2;
+    protected int m_minSizeToTreatAsBigCluster = 9;
+    protected double m_maxNumOfCellToIncludeSingleHit = 3.;
+
+    protected double m_energyMomentumTolerance = 2.; // sigmas
+    protected double m_maxEnergyOfTinyFragments = 1.75; // GeV
+    protected double m_maxCosAngleToConnectTinyShowers = 0.;
+
+    protected boolean m_debug = false;
+
+    /**
+     * Constructor
+     */
+    public SlicedShowerBuilder(PFABookKeepingBroker bookKeeper,
+			       PropertyContainer properties,
+			       ClusterEnergyCalculator chargedCalib,
+			       ClusterEnergyCalculator neutralCalib,
+			       HelixExtrapolator extrapolator)
+    {
+	m_properties = properties;
+	m_bookKeeper = bookKeeper;
+	m_chargedCalib = chargedCalib;
+	m_neutralCalib = neutralCalib;
+        m_extrapolator = extrapolator;
+    }
+
+
+
+    /*******************************************/
+    /*****   M A I N   A L G O R I T H M   *****/
+    /*******************************************/
+
+
+    /**
+     * build hadron showers
+     */
+    public List<ShowerWithBranches> buildShowers(Collection<CalorimeterHit> inputHits, Collection<CalorimeterHit> hitsToShare){
+
+	/* --------- */ printDebug("buildShowers() called with "+      /* --------- */ 
+        /* --------- */            inputHits.size()+" hits in input"); /* --------- */ 
+
+
+	/* --------- */ printDebug("resetting maps"); /* --------- */ 
+
+
+	// reset maps
+	m_hitTypesMap = new HashMap<CalorimeterHit, CalorimeterHitType>();
+	m_hitToRigidClusterMap = new HashMap<CalorimeterHit, Cluster>();
+
+
+	/* --------- */ printDebug("Creating map between hits and rigid clusters"); /* --------- */ 
+
+
+	// create a map from the hits belonging to MIPs to their corresponding MIP object
+	createHitToRigidClusterMap(inputHits);
+
+
+	/* --------- */ printDebug("Categorizing hits"); /* --------- */ 
+
+
+	// categorize hits
+	categorizeHits(inputHits);
+
+
+	/* --------- */ printDebug("Sorting hits into layers"); /* --------- */ 
+
+
+	// sort the hits into layers
+	Map<PFADetectorLayer, List<CalorimeterHit>> layerToHitListMap = sortHitsIntoLayers(inputHits);
+
+
+	/* --------- */ printDebug("Clustering hits into slices"); /* --------- */ 
+
+
+	// cluster the hits in the form of 2D cluster slices
+	Map<PFADetectorLayer, List<Cluster>> clusterSlices = getClusterSlices(layerToHitListMap);
+
+	// check that all hits have been used once and only once
+	{
+	    List<Cluster> allSlicesList = new Vector<Cluster>();
+	    for(PFADetectorLayer layer : clusterSlices.keySet()) {
+		List<Cluster> slicesForLayer = clusterSlices.get(layer);
+		allSlicesList.addAll(slicesForLayer);
+	    }
+	    checkConsistancyOfHitUsage(inputHits, allSlicesList);
+	}
+
+
+	/* --------- */ printDebug("Create map from cluster slices to layers"); /* --------- */ 
+
+
+	// for ease of book-keeping, create a map from a cluster to its corresponding layer
+	Map<Cluster, PFADetectorLayer> clusterSliceToLayerMap = new HashMap<Cluster, PFADetectorLayer>();
+	for(PFADetectorLayer layer : clusterSlices.keySet()) {
+	    List<Cluster> clustersForLayer = clusterSlices.get(layer);
+	    for(Cluster clus : clustersForLayer) {
+		clusterSliceToLayerMap.put(clus, layer);
+	    }
+	}
+
+
+	/* --------- */ printDebug("Sorting layers in inside/out-barrel/endcap order"); /* --------- */
+
+
+	// sort layers from inside out
+	List<PFADetectorLayer> allOccupiedLayers = new Vector<PFADetectorLayer>();
+	allOccupiedLayers.addAll(layerToHitListMap.keySet());
+	Collections.sort(allOccupiedLayers, new PFADetectorLayer.LayerSort());
+
+	// list to store output showers
+       	List<ShowerWithBranches> reconstructedShowers = new Vector<ShowerWithBranches>();
+
+
+	/* --------- */ printDebug("Start loop on "+allOccupiedLayers.size()+" layers"); /* --------- */
+
+
+	// loop on layers and build shower
+	for(PFADetectorLayer layer : allOccupiedLayers) {
+
+
+	    /* --------- */ printDebug("Treating layer "+layer.id()); /* --------- */
+
+
+	    // get the 2D cluster slices for this layer
+	    List<Cluster> clusterSlicesOnThisLayer = clusterSlices.get(layer);
+
+
+	    /* --------- */ printDebug("There is "+clusterSlicesOnThisLayer.size()+" clusters on layer"); /* --------- */ 
+	    /* --------- */ printDebug("Creating candidate associations");                                /* --------- */ 
+
+
+	    // find candidates for associations
+ 	    Map<Cluster, List<ShowerBranch>> clusterToShowerCandidateMap = new HashMap<Cluster, List<ShowerBranch>>();
+ 	    Map<ShowerBranch, List<Cluster>> showerToClusterCandidateMap = new HashMap<ShowerBranch, List<Cluster>>();
+ 	    createCandidateAssociations(clusterSlicesOnThisLayer, reconstructedShowers, clusterSliceToLayerMap, clusterToShowerCandidateMap, showerToClusterCandidateMap);
+
+
+	    /* --------- */ printDebug("Found "+clusterToShowerCandidateMap.keySet().size()+ /* --------- */ 
+            /* --------- */            " clusters assigned to at least one shower");         /* --------- */ 
+
+
+	    /* --------- */ printDebug("Resolving ambiguities"); /* --------- */ 
+
+
+	    // resolve ambiguous associations
+	    // this needs the information from the next layer
+	    List<Cluster> clusterSlicesOnNextLayer = null;
+	    int indexOfNextLayer = allOccupiedLayers.indexOf(layer) + 1;
+	    if(indexOfNextLayer < allOccupiedLayers.size()) {
+		PFADetectorLayer nextLayer = allOccupiedLayers.get(indexOfNextLayer);
+		clusterSlicesOnNextLayer = clusterSlices.get(nextLayer);
+	    }
+	    resolveAmbiguities(clusterSlicesOnNextLayer,
+			       clusterToShowerCandidateMap,
+			       showerToClusterCandidateMap);
+
+
+	    /* --------- */ printDebug("Assigning clusters to showers"); /* --------- */ 
+
+
+	    // perform the actual association
+	    assignClustersToShowers(clusterSlicesOnThisLayer,
+				    clusterSliceToLayerMap,
+				    clusterToShowerCandidateMap,
+				    showerToClusterCandidateMap);
+
+	    // create showers for unassigned clusters
+	    List<Cluster> unassignedClusters = new Vector<Cluster>();
+	    unassignedClusters.addAll(clusterSlicesOnThisLayer);
+	    unassignedClusters.removeAll(clusterToShowerCandidateMap.keySet());
+
+
+	    /* --------- */ printDebug("creating clusters for "+unassignedClusters.size()+" unassigned Clsuters"); /* --------- */ 
+
+
+	    reconstructedShowers.addAll(createShowersForClusters(unassignedClusters));
+	}
+
+
+	/* --------- */ printDebug("Done loop on layers; output contains "+reconstructedShowers.size()+" showers"); /* --------- */
+
+
+	// now assign the shared hits
+	shareLeftoverHits(reconstructedShowers, hitsToShare);
+
+
+	/* --------- */ printDebug("Now apply final adjustments to showers: link secondary neutrals and identify unphysical cases"); /* --------- */
+
+
+	// now apply overrides:
+	//  - link secondary neutrals
+	//  - identify unphysical cases
+	//	applyOverrides(reconstructedShowers);
+
+
+	// before returning, check that all hits have been used and that no hit has been used in more than one shower
+	List<Cluster> allShowerCores = new Vector<Cluster>();
+	for(ShowerWithBranches shower : reconstructedShowers) {
+	    allShowerCores.add(shower.getCluster());
+	}
+	checkConsistancyOfHitUsage(inputHits, allShowerCores);
+
+	// output
+	return reconstructedShowers;
+    }
+
+
+
+
+    /***************************************************/
+    /*****   A L G O R I T H M I C   P I E C E S   *****/
+    /***************************************************/
+
+
+    /**
+     * creates a map from hits belonging to MIPs (seeds) to their corresponding MIP (seed) object
+     */
+    protected void createHitToRigidClusterMap( Collection<CalorimeterHit> inputHits )
+    {
+	Collection<Cluster> mips = m_bookKeeper.getClusterList( "Mips" );
+	Collection<Cluster> seeds = m_bookKeeper.getClusterList( "Seeds" );
+	
+	for( CalorimeterHit hit : inputHits )
+	    {
+		for( Cluster mip : mips )
+		    {
+			if( mip.getCalorimeterHits().contains( hit ) )
+			    {
+				m_hitToRigidClusterMap.put( hit , mip );
+				break;
+			    }
+		    }
+		
+		for( Cluster seed : seeds )
+		    {
+			if( seed.getCalorimeterHits().contains( hit ) )
+			    {
+				m_hitToRigidClusterMap.put( hit , seed );
+				break;
+			    }
+		    }
+	    }
+    }
+
+
+    /**
+     * categorize hits into hits from MIPs, non-MIPs, seeds and non-seeds
+     */
+    protected void categorizeHits( Collection<CalorimeterHit> inputHits )
+    {
+	Collection<Cluster> mips = m_bookKeeper.getClusterList( "Mips" );
+	Collection<Cluster> seeds = m_bookKeeper.getClusterList( "Seeds" );
+	
+	for( CalorimeterHit hit : inputHits )
+	    {
+		boolean isMip = false;
+		for( Cluster mip : mips )
+		    {
+			if( mip.getCalorimeterHits().contains( hit ) ) 
+			    { 
+				isMip = true;
+				break;
+			    }
+		    }
+		
+		boolean isSeed = false;
+		for( Cluster seed : seeds )
+		    {
+			if( seed.getCalorimeterHits().contains( hit ) )
+			    {
+				isSeed = true;
+				break;
+			    }
+		    }
+
+		if( isMip )
+		    {
+			if( isSeed ) m_hitTypesMap.put( hit , CalorimeterHitType.CHT_MIP_SEED );
+			else         m_hitTypesMap.put( hit , CalorimeterHitType.CHT_MIP_NONSEED );
+		    }
+		else
+		    {
+			if( isSeed ) m_hitTypesMap.put( hit , CalorimeterHitType.CHT_NONMIP_SEED );
+			else         m_hitTypesMap.put( hit , CalorimeterHitType.CHT_NONMIP_NONSEED );
+		    }
+	    }
+    }
+
+
+    /**
+     * This method takes a hit list as input and returns a map of hit lists sorted into layers
+     */
+    Map<PFADetectorLayer, List<CalorimeterHit>> sortHitsIntoLayers(Collection<CalorimeterHit> hits) {
+
+	// creates the output
+	Map<PFADetectorLayer, List<CalorimeterHit>> layerToHitListMap = new HashMap<PFADetectorLayer, List<CalorimeterHit>>();
+
+	// loop on input hits
+	for(CalorimeterHit hit : hits){
+
+	    // get the layer for the current hit
+	    PFADetectorLayer layerOfHit = new PFADetectorLayer(hit);
+
+	    // hit position
+	    Hep3Vector hitPos = new BasicHep3Vector(hit.getPosition());
+
+
+	    /* --------- */ printDebug("Found hit with "+                                             /* --------- */
+            /* --------- */           "[R = "+Math.sqrt(hitPos.x()*hitPos.x()+hitPos.y()*hitPos.y())+ /* --------- */
+	    /* --------- */           ", Z = "+hitPos.z()+                                            /* --------- */
+	    /* --------- */           "] at layer "+layerOfHit.id()+                                  /* --------- */
+	    /* --------- */           "which is on detector "+layerOfHit.subdetectorName()+           /* --------- */
+	    /* --------- */           " at distance "+layerOfHit.getDistanceToIP()+" from IP");       /* --------- */
+
+
+	    // get the hits already found on the same layer
+	    List<CalorimeterHit> hitsOnSameLayer = layerToHitListMap.get(layerOfHit);
+	    // add to the map
+	    if(hitsOnSameLayer == null){
+		hitsOnSameLayer = new Vector<CalorimeterHit>();
+		layerToHitListMap.put(layerOfHit, hitsOnSameLayer);
+	    }
+	    hitsOnSameLayer.add(hit);
+	}
+
+	return layerToHitListMap;
+    }
+
+
+    /**
+     * This method provides the 2D cluster slices, organized by layer
+     */
+    protected Map<PFADetectorLayer, List<Cluster>> getClusterSlices(Map<PFADetectorLayer, List<CalorimeterHit>> layerToHitListMap){
+
+	// output
+	Map<PFADetectorLayer, List<Cluster>> slices = new HashMap<PFADetectorLayer, List<Cluster>> ();
+
+	// configure the k-mean cluster finder
+	KMeanClumpFinder clus2DFinder = new KMeanClumpFinder();
+	
+	// input parameters
+	// seed finding
+	int SDT333 = 0;
+	int SDT553 = 0;
+	int SST = 1;
+	// cluster finding
+	int CDT333 = 0;
+	int CDT553 = 0;
+	int CST = 1;
+	// number of iterations
+	int maxItr = 1;
+	
+	KMeanParameters kMeanParameters = new KMeanParameters( SDT333 , SDT553 , SST , CDT333 , CDT553 , CST , maxItr );
+	clus2DFinder.setParameters( kMeanParameters );
+
+	// run k-mean on each layer
+	for(PFADetectorLayer layer : layerToHitListMap.keySet()){
+
+	    // get the hits on the layer
+	    List<CalorimeterHit> hitsOnLayer = layerToHitListMap.get(layer);
+
+	    // filter out MIP and seed hits
+	    List<CalorimeterHit> nonMipHitsOnLayer = new Vector<CalorimeterHit>();
+	    for(CalorimeterHit hitToFilter : hitsOnLayer){
+		CalorimeterHitType type = getHitType(hitToFilter);
+		boolean keep = true;
+		if(type == CalorimeterHitType.CHT_UNKNOWN) keep = false;
+                if(type == CalorimeterHitType.CHT_MIP_SEED) keep = false;
+                if(type == CalorimeterHitType.CHT_MIP_NONSEED) keep = false;
+		if(type == CalorimeterHitType.CHT_NONMIP_SEED) keep = false;
+		if(keep) {
+		    nonMipHitsOnLayer.add(hitToFilter);
+		}
+	    }
+
+	    // run k-mean
+	    List<Cluster> listClus2D = clus2DFinder.createClusters( nonMipHitsOnLayer );
+
+	    // deal with single hit clusters
+            List<Cluster> listSingleHit = new Vector<Cluster>();
+            List<Cluster> listMultiHit = new Vector<Cluster>();
+            for( Cluster clus2D : listClus2D ) {
+		if( clus2D.getCalorimeterHits().size() == 1 ) {
+		    listSingleHit.add( clus2D );
+		}else{
+		    listMultiHit.add( clus2D );
+                }
+	    }
+            for( Cluster clus2D1 : listSingleHit ) {
+		Hep3Vector vPos1 = new BasicHep3Vector( clus2D1.getPosition() );
+		double distMin = m_maxNumOfCellToIncludeSingleHit * layer.getCellSize();
+		Cluster clus2Dmin = null;
+		for( Cluster clus2D2 : listMultiHit ) {
+		    for( CalorimeterHit hit : clus2D2.getCalorimeterHits() ) {
+			Hep3Vector vPos2 = new BasicHep3Vector( hit.getPosition() );
+			double mDelta = VecOp.sub( vPos1 , vPos2 ).magnitude();
+			if( mDelta < distMin ) {
+			    distMin = mDelta;
+			    clus2Dmin = clus2D2;
+			}
+		    }
+		}
+		if( clus2Dmin != null ) {
+		    for( CalorimeterHit hit : clus2D1.getCalorimeterHits() ) {
+			((BasicCluster)clus2Dmin).addHit( hit );
+		    }
+		    listClus2D.remove( clus2D1 );
+		}
+	    }
+
+	    // after k-mean: check that every hit has been used once and only once
+	    checkConsistancyOfHitUsage(nonMipHitsOnLayer, listClus2D);
+	    
+	    // add the MIP and seed hits back as small clusters
+	    Map<Cluster, BasicCluster> clusterToSliceMap = new HashMap<Cluster, BasicCluster>();
+	    for(CalorimeterHit hit : hitsOnLayer){
+		if( nonMipHitsOnLayer.contains(hit) ) continue;
+		Cluster clusterForHit = m_hitToRigidClusterMap.get(hit);
+		if(clusterForHit == null) throw new AssertionError("inconsistent book-keeping");
+		BasicCluster sliceForHit = clusterToSliceMap.get(clusterForHit);
+		if(sliceForHit == null){
+		    sliceForHit = new BasicCluster();
+		    clusterToSliceMap.put(clusterForHit, sliceForHit);
+		}
+		sliceForHit.addHit(hit);
+	    }
+	    listClus2D.addAll(clusterToSliceMap.values());
+
+	    // after adding MIPs: check that every hit has been used once and only once
+	    checkConsistancyOfHitUsage(hitsOnLayer, listClus2D);
+
+	    // save the result
+	    slices.put(layer, listClus2D);
+	}
+
+	return slices;
+    }
+
+
+    /**
+     * create candidates for associations between clusters and showers
+     */
+    protected void createCandidateAssociations(List<Cluster> clusterSlicesOnThisLayer,
+					       List<ShowerWithBranches> reconstructedShowers,
+					       Map<Cluster, PFADetectorLayer> clusterSliceToLayerMap,
+					       Map<Cluster, List<ShowerBranch>> clusterToShowerCandidateMap,
+					       Map<ShowerBranch, List<Cluster>> showerToClusterCandidateMap) {
+
+
+	/* --------- */ printDebug("createCandidateAssociations() called with "+                     /* --------- */ 
+        /* --------- */            clusterSlicesOnThisLayer.size()+" clusters and "+                 /* --------- */ 
+	/* --------- */            reconstructedShowers.size()+" previously reconstructed showers"); /* --------- */ 
+
+
+	/* --------- */ printDebug("Start loop on clusters"); /* --------- */ 
+
+
+ 	// find candidates for associations
+ 	for( Cluster clus2D : clusterSlicesOnThisLayer ) {
+
+ 	    // get the layer for this cluster
+ 	    PFADetectorLayer layer = clusterSliceToLayerMap.get(clus2D);
+ 	    if(layer == null) throw new AssertionError("book-keeping error");
+
+
+	    /* --------- */ printDebug("Finding candidate assotiations for cluster with "+               /* --------- */ 
+	    /* --------- */            clus2D.getCalorimeterHits().size()+" hits on layer "+layer.id()); /* --------- */ 
+
+
+	    /* --------- */ printDebug("Start loop on showers"); /* --------- */ 
+
+
+ 	    // loop on shower branches
+ 	    for( ShowerWithBranches shower : reconstructedShowers ) {
+
+
+		/* --------- */ printDebug("Shower has "+shower.getBranches().size()+" branch"); /* --------- */ 
+		/* --------- */ printDebug("Start loop on branches");                            /* --------- */ 
+
+
+ 		for( ShowerBranch branch : shower.getBranches() ) {
+
+
+		    /* --------- */ printDebug("Shower branch has "+branch.getClusters().size()+" already assigned clusters"); /* --------- */ 
+
+
+ 		    // get the last added cluster to the shower and the corresponding layer
+ 		    Cluster lastAddedClus = branch.getLastAddedCluster();
+ 		    PFADetectorLayer lastAddedLayer = clusterSliceToLayerMap.get(lastAddedClus);
+ 		    if(lastAddedLayer == null) throw new AssertionError("book-keeping error");
+
+
+		    /* --------- */ printDebug("Last added cluster has "+lastAddedClus.getCalorimeterHits().size()+" hits and was on layer "+lastAddedLayer.id()); /* --------- */ 
+
+
+		    // take the decision to connect or not
+		    if(isGoodCandidateAssociation( lastAddedClus, clus2D)) {
+
+
+			/* --------- */ printDebug("Decision was in favor of connecting to this branch: proceed with this branch"); /* --------- */ 
+			/* --------- */ printDebug("Adding this branch to the list of this cluster's candidates");                 /* --------- */
+
+
+			// add to the cluster to shower map
+			List<ShowerBranch> showersForClus = clusterToShowerCandidateMap.get(clus2D);
+			if( showersForClus == null ) {
+			    showersForClus = new Vector<ShowerBranch>();
+			    clusterToShowerCandidateMap.put( clus2D , showersForClus );
+			}
+
+
+			/* --------- */ printDebug("Cluster already had "+showersForClus.size()+" branch candidates: adding a new one"); /* --------- */ 
+
+
+			showersForClus.add( branch );
+
+
+			/* --------- */ printDebug("Cluster has "+showersForClus.size()+" branch candidates after adding a this branch"); /* --------- */
+ 			/* --------- */ printDebug("Adding this cluster to the list of this branche's candidates");                       /* --------- */ 
+
+
+			// add to the shower to cluster map
+			List<Cluster> clustersForShower = showerToClusterCandidateMap.get( branch );
+			if( clustersForShower == null ) {
+			    clustersForShower = new Vector<Cluster>();
+			    showerToClusterCandidateMap.put( branch , clustersForShower );
+			}
+
+
+			/* --------- */ printDebug("Branch already had "+clustersForShower.size()+" cluster candidates: adding a new one"); /* --------- */ 
+
+
+			clustersForShower.add( clus2D );
+
+
+			/* --------- */ printDebug("Branch has "+clustersForShower.size()+" cluster candidates after addind this cluster"); /* --------- */ 
+
+
+		    } else {
+
+
+			/* --------- */ printDebug("Decision was not in favor of connecting to this branch: proceed to next branch"); /* --------- */ 
+
+
+		    }
+		}
+
+
+		/* --------- */ printDebug("Done with loop on branches for this shower");  /* --------- */
+
+
+	    }
+
+
+	    /* --------- */ printDebug("Done with loop on showers for this cluster");  /* --------- */
+
+
+	}
+
+
+	/* --------- */ printDebug("Done with loop on clusters: leaving createCandidateAssociations()");  /* --------- */
+
+
+    }
+
+
+    /**
+     * Resolve cases where a cluster is assigned to multiple showers
+     * as well as cases where multiple clusters are assigned to the same shower
+     */
+    protected void resolveAmbiguities( List<Cluster> clusterSlicesOnNextLayer,
+				       Map<Cluster, List<ShowerBranch>> clusterToShowerCandidateMap,
+				       Map<ShowerBranch, List<Cluster>> showerToClusterCandidateMap )
+    {
+
+	// first make lists of concurrent connections
+	List< List<ConnectionCandidate> > concurrentConnectionsLists = getConcurrentConnections( clusterToShowerCandidateMap, showerToClusterCandidateMap );
+
+	// loop on concurrent connections lists and resolve ambiguities
+	for(List<ConnectionCandidate> concurrentConnections : concurrentConnectionsLists) {
+
+	    // resolve the ambiguities
+	    List<ConnectionCandidate> resolvedConnections = resolveConcurrentConnections(concurrentConnections, 
+											 clusterToShowerCandidateMap,
+											 showerToClusterCandidateMap,
+											 clusterSlicesOnNextLayer );
+
+	    // remove vetoed connections from the input maps
+	    List<ConnectionCandidate> vetoedConnections = new Vector<ConnectionCandidate>();
+	    vetoedConnections.addAll(concurrentConnections);
+	    vetoedConnections.removeAll(resolvedConnections);
+	    for(ConnectionCandidate connection : vetoedConnections) {
+
+		// get the shower branch and the cluster objects
+		ShowerBranch branch = connection.getBranch();
+		Cluster cluster = connection.getCluster();
+
+		// remove from the cluster to shower branch map
+		List<ShowerBranch> branchesForCluster = clusterToShowerCandidateMap.get(cluster);
+		if(branchesForCluster == null) continue; //throw new AssertionError("book-keeping error");
+		branchesForCluster.remove(branch);
+		if(branchesForCluster.size() == 0) {
+		    clusterToShowerCandidateMap.remove(cluster);
+		}
+
+		// remove from the shower branch to cluster map
+		List<Cluster> clustersForBranch = showerToClusterCandidateMap.get(branch);
+		// if(clustersForBranch == null) throw new AssertionError("book-keeping error");
+		if(clustersForBranch != null){
+		    clustersForBranch.remove(cluster);
+		    if(clustersForBranch.size() == 0) {
+			showerToClusterCandidateMap.remove(branch);
+		    }
+		}
+	    }
+	}
+    }
+
+
+    /**
+     * Performs the association between the clusters and the showers
+     * All decisions have already been made and this is mainly book-keeping
+     */
+    protected void  assignClustersToShowers( List<Cluster> clusterSlicesOnThisLayer,
+					     Map<Cluster, PFADetectorLayer> clusterSliceToLayerMap,
+					     Map<Cluster, List<ShowerBranch>> clusterToShowerCandidateMap,
+					     Map<ShowerBranch, List<Cluster>> showerToClusterCandidateMap )
+    {
+
+	// Some of the big clusters are assigned to more than one shower.
+	// These clusters have substructure and can therefore be split.
+	splitMultiplyAssignedLargeClusters(clusterSlicesOnThisLayer,
+					   clusterSliceToLayerMap,
+					   clusterToShowerCandidateMap,
+					   showerToClusterCandidateMap);
+
+	// loop on shower branches
+	for(ShowerBranch branch : showerToClusterCandidateMap.keySet()) {
+
+	    // get the clusters to be added to the branch
+	    List<Cluster> clustersToAdd = showerToClusterCandidateMap.get(branch);
+
+	    // if there is only one cluster, add it to the branch
+	    if(clustersToAdd.size() == 1) {
+		branch.addCluster(clustersToAdd.get(0));
+	    }
+	    // if there is more than one cluster, create a separate branch for each
+	    else if(clustersToAdd.size() > 1) {
+
+		// get the branching point from the current branch
+		Hep3Vector branchingPoint = branch.getPosition();
+
+		// get the parent shower to which the new branches belong
+		ShowerWithBranches shower = branch.getMother();
+
+		// create a new branch for each cluster
+		for(Cluster clusToAdd : clustersToAdd) {
+		    ShowerBranch newBranch = new ShowerBranch(branchingPoint);
+		    newBranch.addCluster( clusToAdd );
+		    shower.addBranch( newBranch );
+		}
+	    }
+	    // if list is empty, we have made a mistake somewhere
+	    else throw new AssertionError("book-keeping error");
+	}
+    }
+
+
+    /**
+     * shares the leftover hits
+     */
+    protected void shareLeftoverHits(List<ShowerWithBranches> reconstructedShowers, Collection<CalorimeterHit> hitsToShare)
+    {
+
+	// create bite-sized clusters for the sharing algorithm
+	List<Cluster> smallClustersToShare = new Vector<Cluster>();
+	for (CalorimeterHit hit : hitsToShare) {
+	    BasicCluster tmpClus = new BasicCluster();
+	    tmpClus.addHit(hit);
+	    smallClustersToShare.add(tmpClus);
+	}
+
+	// Set up sharing
+	double maxDistanceForSmallClusters = 250.0; // 25cm isolation cut-off
+	if (m_properties.getFlag("allowSharingOfIsolatedHits")) {
+	    maxDistanceForSmallClusters = 99999.9; // effectively no cut-off
+	}
+
+	// get the photons: these will be excluded from the cone sharing
+	List<Cluster> photons = (List<Cluster>)m_bookKeeper.getClusterList("Photons");
+
+	// the linkable clusters: clusters among which the sharing is performed
+	// --> use the 2D slices
+	List<Cluster> linkableClusters = new Vector<Cluster>();
+	for(ShowerWithBranches shower : reconstructedShowers) {
+	    for(ShowerBranch branch : shower.getBranches()) {
+		for(Cluster cluster : branch.getClusters()) {
+		    linkableClusters.add(cluster);
+		}
+	    }
+	}
+
+	// share clusters: based on proximity and cone
+	List<SharedClusterGroup> sharedClusters = new Vector<SharedClusterGroup>();
+	MultipleClusterSharingAlgorithm proximityAndConeAlg = new MultipleClusterSharingAlgorithm();
+	proximityAndConeAlg.addAlgorithm(new ProximityClusterSharingAlgorithm(40.0, maxDistanceForSmallClusters));
+	proximityAndConeAlg.addAlgorithm(new ClusterSharingAlgorithmExcludingTargets(new ConeClusterSharingAlgorithm(0.95, 0.90), photons));
+	SharedClusterGroup sharedSmallClusters = new SharedClusterGroup(smallClustersToShare, proximityAndConeAlg);
+	sharedSmallClusters.createShares(linkableClusters);
+	sharedSmallClusters.rebuildHints();
+	sharedClusters.add(sharedSmallClusters);
+
+	// book-keeping: tell the showers about the results
+	for(ShowerWithBranches shower : reconstructedShowers) {
+	    shower.setSharedClusters(sharedClusters);
+	}
+    }
+
+
+    /**
+     * Apply overrides to shower reconstruction
+     */
+    protected void applyOverrides(List<ShowerWithBranches> reconstructedShowers)
+    {
+
+	/* --------- */ boolean printhere = true; /* --------- */ 
+
+
+	/* --------- */ printDebug("ApplyOverrides called with "+reconstructedShowers.size()+" showers in input", printhere);  /* --------- */
+	/* --------- */ for( ShowerWithBranches shower : reconstructedShowers ) {                                              /* --------- */
+        /* --------- */     printDebug("Shower "+reconstructedShowers.indexOf(shower)+": "+                                    /* --------- */
+        /* --------- */                "E = "+shower.getRealEnergy()+" "+                                                      /* --------- */
+	/* --------- */                "P = "+shower.getMomentum()+" "+                                                        /* --------- */
+        /* --------- */                "Layer0 = "+getFirstLayer(shower.getCluster()).id()+" "+                                /* --------- */
+        /* --------- */                "Layer1 = "+getLastLayer(shower.getCluster()).id(), printhere);                         /* --------- */
+	/* --------- */ }                                                                                                      /* --------- */
+
+
+	// categorize showers
+	Map<ShowerType, List<ShowerWithBranches>> showerTypeToShowerListMap = categorizeShowers(reconstructedShowers);
+
+
+	/* --------- */ printDebug("Found "+showerTypeToShowerListMap.get(ShowerType.ST_FRAGMENT).size()+" showers of type ST_FRAGMENT", printhere);                       /* --------- */
+	/* --------- */ printDebug("Found "+showerTypeToShowerListMap.get(ShowerType.ST_NEUTRAL).size()+" showers of type ST_NEUTRAL", printhere);                         /* --------- */
+	/* --------- */ printDebug("Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_INNEED).size()+" showers of type ST_CHARGED_INNEED", printhere);           /* --------- */
+	/* --------- */ printDebug("Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_SATISFIED).size()+" showers of type ST_CHARGED_SATISFIED", printhere);     /* --------- */
+	/* --------- */ printDebug("Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_OVERSTUFFED).size()+" showers of type ST_CHARGED_OVERSTUFFED", printhere); /* --------- */
+
+
+	// loop on tiny shower fragments and assign them to the closest non-tiny shower
+	for(ShowerWithBranches tinyShower : showerTypeToShowerListMap.get(ShowerType.ST_FRAGMENT)) {
+
+	    // tiny shower position
+	    Hep3Vector tinyShowerPos = new BasicHep3Vector(tinyShower.getCluster().getPosition());
+
+	    // best shower to connect to
+	    double bestCosAngle = m_maxCosAngleToConnectTinyShowers;
+	    ShowerWithBranches bestShower = null;
+
+	    // loop on reconstructed showers to find best match
+	    for(ShowerWithBranches shower : reconstructedShowers) {
+
+		// don't connect to another tiny shower
+		if(showerTypeToShowerListMap.get(ShowerType.ST_FRAGMENT).contains(shower)) continue;
+
+		// shower position
+		Hep3Vector showerPos = new BasicHep3Vector(shower.getCluster().getPosition());
+
+		// connect based on angle
+		double cosAngle = VecOp.dot(VecOp.unit(showerPos), VecOp.unit(tinyShowerPos));
+		if(cosAngle > bestCosAngle) {
+		    bestCosAngle = cosAngle;
+		    bestShower = shower;
+		}
+	    }
+
+	    // if a best match was found: connect to it
+	    if(bestShower != null) {
+		for(ShowerBranch branch : tinyShower.getBranches()) {
+		    bestShower.addBranch(branch);
+		}
+		reconstructedShowers.remove(tinyShower);
+	    }
+	}
+
+
+	/* --------- */ printDebug("After tiny showers: "+reconstructedShowers.size()+" showerst", printhere);                  /* --------- */
+	/* --------- */ for( ShowerWithBranches shower : reconstructedShowers ) {                                              /* --------- */
+        /* --------- */     printDebug("Shower "+reconstructedShowers.indexOf(shower)+": "+                                    /* --------- */
+        /* --------- */                "E = "+shower.getRealEnergy()+" "+                                                      /* --------- */
+	/* --------- */                "P = "+shower.getMomentum()+" "+                                                        /* --------- */
+        /* --------- */                "Layer0 = "+getFirstLayer(shower.getCluster()).id()+" "+                                /* --------- */
+        /* --------- */                "Layer1 = "+getLastLayer(shower.getCluster()).id(), printhere);                         /* --------- */
+	/* --------- */ }                                                                                                      /* --------- */
+
+
+	// categorize showers
+	showerTypeToShowerListMap = categorizeShowers(reconstructedShowers);
+
+
+	/* --------- */ printDebug("After tiny showers: Found "+showerTypeToShowerListMap.get(ShowerType.ST_FRAGMENT).size()+" showers of type ST_FRAGMENT", printhere);                       /* --------- */
+	/* --------- */ printDebug("After tiny showers: Found "+showerTypeToShowerListMap.get(ShowerType.ST_NEUTRAL).size()+" showers of type ST_NEUTRAL", printhere);                         /* --------- */
+	/* --------- */ printDebug("After tiny showers: Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_INNEED).size()+" showers of type ST_CHARGED_INNEED", printhere);           /* --------- */
+	/* --------- */ printDebug("After tiny showers: Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_SATISFIED).size()+" showers of type ST_CHARGED_SATISFIED", printhere);     /* --------- */
+	/* --------- */ printDebug("After tiny showers: Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_OVERSTUFFED).size()+" showers of type ST_CHARGED_OVERSTUFFED", printhere); /* --------- */
+
+	List<ShowerWithBranches> smallBranches = new Vector<ShowerWithBranches>();
+	while(reconstructedShowers.size() > 8) {
+	    ShowerWithBranches theSmallest = null;
+	    for(ShowerWithBranches shower : reconstructedShowers) {
+		if(shower.isCharged()) continue;
+		if(theSmallest == null || shower.getRealEnergy() < theSmallest.getRealEnergy()) {
+		    theSmallest = shower;
+		}
+	    }
+	    if(theSmallest == null) break;
+	    reconstructedShowers.remove(theSmallest);
+	    smallBranches.add(theSmallest);
+	}
+
+	List<List<List<ShowerWithBranches>>> allCombinations = makeAllGroupCombinations(reconstructedShowers);
+
+	List<List<ShowerWithBranches>> bestCombination = null;
+	double bestCombinationFigureOfMerit = 0;
+	for(List<List<ShowerWithBranches>> aCombination : allCombinations) {
+	    double combinationFigureOfMerit = getCombinationFigureOfMerit(aCombination);
+	    if(combinationFigureOfMerit > bestCombinationFigureOfMerit) {
+		bestCombinationFigureOfMerit = combinationFigureOfMerit;
+		bestCombination = aCombination;
+	    }
+	}
+
+	if(bestCombination != null) {
+	    for(List<ShowerWithBranches> aGroup : bestCombination) {
+		ShowerWithBranches base = null;
+		for(ShowerWithBranches shower : aGroup) {
+		    if(shower.isCharged()) {
+			base = shower;
+			break;
+		    }
+		}
+		for(ShowerWithBranches shower : aGroup) {
+		    if(base == null) {
+			base = shower;
+		    }else if(shower != base){
+			reconstructedShowers.remove(shower);
+			for(ShowerBranch branch : shower.getBranches()) {
+			    base.addBranch(branch);
+			}
+			for(Track track : shower.getTracks()) {
+			    base.addTrack(track);
+			}
+		    }
+		}
+	    }
+	}
+
+	reconstructedShowers.addAll(smallBranches);
+
+
+	/* --------- */ printDebug("After reconnections: "+reconstructedShowers.size()+" showerst", printhere);                  /* --------- */
+	/* --------- */ for( ShowerWithBranches shower : reconstructedShowers ) {                                              /* --------- */
+        /* --------- */     printDebug("Shower "+reconstructedShowers.indexOf(shower)+": "+                                    /* --------- */
+        /* --------- */                "E = "+shower.getRealEnergy()+" "+                                                      /* --------- */
+	/* --------- */                "P = "+shower.getMomentum()+" "+                                                        /* --------- */
+        /* --------- */                "Layer0 = "+getFirstLayer(shower.getCluster()).id()+" "+                                /* --------- */
+        /* --------- */                "Layer1 = "+getLastLayer(shower.getCluster()).id(), printhere);                         /* --------- */
+	/* --------- */ }                                                                                                      /* --------- */
+
+
+	// categorize showers
+	showerTypeToShowerListMap = categorizeShowers(reconstructedShowers);
+
+
+	/* --------- */ printDebug("After reconnections: Found "+showerTypeToShowerListMap.get(ShowerType.ST_FRAGMENT).size()+" showers of type ST_FRAGMENT", printhere);                       /* --------- */
+	/* --------- */ printDebug("After reconnections: Found "+showerTypeToShowerListMap.get(ShowerType.ST_NEUTRAL).size()+" showers of type ST_NEUTRAL", printhere);                         /* --------- */
+	/* --------- */ printDebug("After reconnections: Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_INNEED).size()+" showers of type ST_CHARGED_INNEED", printhere);           /* --------- */
+	/* --------- */ printDebug("After reconnections: Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_SATISFIED).size()+" showers of type ST_CHARGED_SATISFIED", printhere);     /* --------- */
+	/* --------- */ printDebug("After reconnections: Found "+showerTypeToShowerListMap.get(ShowerType.ST_CHARGED_OVERSTUFFED).size()+" showers of type ST_CHARGED_OVERSTUFFED", printhere); /* --------- */
+
+    }
+
+
+    /**
+     * Calculates a figure of merit for a combination: the geometric mean of the per-group E/p compatibility probabilities.
+     */
+    protected double getCombinationFigureOfMerit(List<List<ShowerWithBranches>> aCombination) 
+    {
+
+	for(List<ShowerWithBranches> aGroup : aCombination) {
+	    boolean isAllNeutral = true;
+	    for(ShowerWithBranches aShower : aGroup) {
+		if(aShower.isCharged()) {
+		    isAllNeutral = false;
+		    break;
+		}
+	    }
+	    if(isAllNeutral && aGroup.size() > 1) {
+		return -1;
+	    }
+	}
+
+	double figureOfMerit = 1;
+	double nChargedGroup = 0;
+
+	for(List<ShowerWithBranches> aGroup : aCombination) {
+	    boolean isAllNeutral = true;
+	    for(ShowerWithBranches aShower : aGroup) {
+		if(aShower.isCharged()) {
+		    isAllNeutral = false;
+		    break;
+		}
+	    }
+	    if(isAllNeutral) {
+		continue;
+	    }
+	    ShowerWithBranches combinedShower = new ShowerWithBranches(m_chargedCalib);
+	    for(ShowerWithBranches shower : aGroup) {
+		for(ShowerBranch branch : shower.getBranches()) {
+		    combinedShower.addBranch(branch);
+		}
+		for(Track track : shower.getTracks()) {
+		    combinedShower.addTrack(track);
+		}
+		combinedShower.setSharedClusters(shower.getSharedClusters());
+	    }
+
+	    double energyFromInDet = combinedShower.getMomentum();
+	    double energyFromCalo = combinedShower.getRealEnergy();
+	    double energyUncertainty = combinedShower.getEnergyUncertainty();
+
+	    double energyResidual = (energyFromCalo - energyFromInDet) / energyUncertainty;
+
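+	    // two-sided Gaussian tail probability that the calorimeter energy and
+	    // the tracker momentum agree within the quoted uncertainty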
+	    double probEoverP = 2 * Erf.phic(Math.abs(energyResidual));
+
+	    figureOfMerit *= probEoverP;
+	    nChargedGroup += 1;
+
+
+	    /* --------- */ printDebug("Combination: E = "+energyFromCalo+" P = "+energyFromInDet+" S = "+energyUncertainty+" R = "+energyResidual+" p = "+probEoverP); /* --------- */
+	}
+
+
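+	// combine the per-group probabilities as a geometric mean: pow(product, 1/n)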
+	if(nChargedGroup < 1) {
+	    return figureOfMerit;
+	}
+
+	return Math.pow(figureOfMerit, 1/nChargedGroup);
+    }
+
+
+    /**
+     * Builds all possible ways of grouping the given showers.
+     */
+    protected List<List<List<ShowerWithBranches>>> makeAllGroupCombinations(List<ShowerWithBranches> reconstructedShowers)
+    {
+
+	List<List<ShowerWithBranches>> theFlatPartition = new Vector<List<ShowerWithBranches>>();
[truncated at 1000 lines; 2758 more skipped]

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
BaselineShowerBuilder.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- BaselineShowerBuilder.java	23 Oct 2011 09:50:31 -0000	1.2
+++ BaselineShowerBuilder.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -9,12 +9,14 @@
 import org.lcsim.util.swim.*;
 import org.lcsim.util.hitmap.*;
 import org.lcsim.mc.fast.tracking.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.debug.*;
 import org.lcsim.geometry.subdetector.*;
 import org.lcsim.geometry.*;
+import org.lcsim.recon.util.CalorimeterInformation;
+import org.lcsim.geometry.Calorimeter.CalorimeterType;
 
 public class BaselineShowerBuilder implements IShowerBuilder {
 
@@ -94,7 +96,8 @@
 	Map<Track, Set<Track>> mapTrackToJet = null;
 
 	// Iterate to build clusters:
-	for (int iIter=0; iIter<10; iIter++) {
+	int nIter = (int)m_properties.getCut("numberOfShowerBuildingIterations");
+	for (int iIter=0; iIter<nIter; iIter++) {
 	    // 	    newMapShowerComponentToTrack.clear();
 	    // 	    newMapTrackToShowerComponents.clear();
 	    // 	    newMapTrackToVetoedAdditions.clear();
@@ -1689,11 +1692,9 @@
     }
     protected int countHitsInLastLayersOfHcal(Collection<Cluster> clusters, int nLayersToCheck) {
 	// Pick up detector geometry
-        Detector det = m_event.getDetector();
-        CylindricalCalorimeter hadBarrel = ((CylindricalCalorimeter) det.getSubdetectors().get("HADBarrel"));
-        CylindricalCalorimeter hadEndcap = ((CylindricalCalorimeter) det.getSubdetectors().get("HADEndcap"));
-        int nLayersHadBarrel = hadBarrel.getLayering().getLayerCount();
-        int nLayersHadEndcap = hadEndcap.getLayering().getLayerCount();
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	int nLayersHadBarrel = ci.getNLayers(CalorimeterType.HAD_BARREL);
+	int nLayersHadEndcap = ci.getNLayers(CalorimeterType.HAD_ENDCAP);
 
 	// Scan for hits
 	Set<Long> hitsFoundInLastLayersOfHcal = new HashSet<Long>();
@@ -1723,13 +1724,12 @@
     }
     protected int countHitsInSideEdgesOfHcal(Cluster clus, int nLayersToCheck) {
 	// Pick up detector geometry
-        Detector det = m_event.getDetector();
-        CylindricalCalorimeter hadBarrel = ((CylindricalCalorimeter) det.getSubdetectors().get("HADBarrel"));
-	double backZ = hadBarrel.getZMax();
-	double innerRadius = hadBarrel.getInnerRadius();
+	CalorimeterInformation ci = CalorimeterInformation.instance();
+	double backZ = ci.getZMax(CalorimeterType.HAD_BARREL);
+	double innerRadius = ci.getRMin(CalorimeterType.HAD_BARREL);
 	double tanTheta = innerRadius/backZ;
 
-	org.lcsim.geometry.IDDecoder id = hadBarrel.getIDDecoder();
+	IDDecoder id = ci.getIDDecoder(CalorimeterType.HAD_BARREL);
 	if (id instanceof org.lcsim.geometry.segmentation.NonprojectiveCylinder) {
 	    org.lcsim.geometry.segmentation.NonprojectiveCylinder segmentation = (org.lcsim.geometry.segmentation.NonprojectiveCylinder) id;
 	    double gridZ = segmentation.getGridSizeZ();

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
ChargedHadronClusterEnergyCalculator.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ChargedHadronClusterEnergyCalculator.java	23 Oct 2011 09:50:31 -0000	1.2
+++ ChargedHadronClusterEnergyCalculator.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -18,7 +18,7 @@
   *
   * @author Mat Charles <[log in to unmask]>
   *
-  * @version $Id: ChargedHadronClusterEnergyCalculator.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: ChargedHadronClusterEnergyCalculator.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   */
 
 public class ChargedHadronClusterEnergyCalculator extends Driver implements ClusterEnergyCalculator

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
CheckDisjoint.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- CheckDisjoint.java	23 Oct 2011 09:50:31 -0000	1.2
+++ CheckDisjoint.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -8,7 +8,7 @@
 import org.lcsim.recon.cluster.directedtree.*;
 import org.lcsim.util.hitmap.*;
 import org.lcsim.util.decision.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 /**
  * Validation driver to check that two collections/hitmaps are disjoint.
@@ -17,7 +17,7 @@
  *   2) A Collection of Cluster objects
  *   3) A HitMap
  *
- * @version $Id: CheckDisjoint.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: CheckDisjoint.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  * @author [log in to unmask]
  */
 

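The disjointness validation above amounts to checking that no hit appears in both inputs. A minimal sketch of that check (illustrative, not the driver's actual code):

    import java.util.*;

    class DisjointCheckSketch {
        // true if no element of b also appears in a
        static <T> boolean disjoint(Collection<T> a, Collection<T> b) {
            Set<T> seen = new HashSet<T>(a);
            for (T item : b) {
                if (seen.contains(item)) return false; // shared element found
            }
            return true;
        }
    }
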
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
CheckSkeletonsForMultipleTracks.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- CheckSkeletonsForMultipleTracks.java	27 May 2011 12:01:11 -0000	1.1
+++ CheckSkeletonsForMultipleTracks.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -10,7 +10,7 @@
 import org.lcsim.event.util.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.*;
 import org.lcsim.recon.cluster.util.BasicCluster;
-import org.lcsim.recon.pfa.identifier.TrackClusterMatcher;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackClusterMatcher;
 
 public class CheckSkeletonsForMultipleTracks extends Driver {
 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
ConeMIPReassignmentAlgorithm.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ConeMIPReassignmentAlgorithm.java	23 Oct 2011 09:50:31 -0000	1.2
+++ ConeMIPReassignmentAlgorithm.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -13,7 +13,7 @@
  * the track at its showering point. We then calculate the angle
  * between the tangent and the vector from the apex to the cluster.
  *
- * @version $Id: ConeMIPReassignmentAlgorithm.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: ConeMIPReassignmentAlgorithm.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  */
 
 public class ConeMIPReassignmentAlgorithm implements ReassignClustersAlgorithm {

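The cone test described in the javadoc reduces to an angular cut between the helix tangent at the showering point and the apex-to-cluster direction. A minimal sketch under that reading (names and the cut parameter are illustrative, not the committed implementation):

    import hep.physics.vec.Hep3Vector;
    import hep.physics.vec.VecOp;

    class ConeTestSketch {
        // accept the cluster if the apex-to-cluster direction lies within
        // maxAngle radians of the track tangent at the showering point (the apex)
        static boolean insideCone(Hep3Vector apex, Hep3Vector tangent,
                                  Hep3Vector clusterPos, double maxAngle) {
            Hep3Vector toCluster = VecOp.sub(clusterPos, apex);
            double cosAngle = VecOp.dot(VecOp.unit(tangent), VecOp.unit(toCluster));
            return cosAngle > Math.cos(maxAngle);
        }
    }
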
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
ConeReassignmentAlgorithm.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- ConeReassignmentAlgorithm.java	27 May 2011 12:01:11 -0000	1.1
+++ ConeReassignmentAlgorithm.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -5,7 +5,7 @@
 import org.lcsim.event.util.*;
 import org.lcsim.event.*;
 import hep.physics.vec.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 public class ConeReassignmentAlgorithm implements ReassignClustersAlgorithm {
     protected double m_limit;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
ExampleGenerateLikelihood.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ExampleGenerateLikelihood.java	23 Oct 2011 09:50:31 -0000	1.2
+++ ExampleGenerateLikelihood.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -17,7 +17,7 @@
  * energy.
  *
  * @author Mat <[log in to unmask]>
- * @version $Id: ExampleGenerateLikelihood.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: ExampleGenerateLikelihood.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  */
 
 public class ExampleGenerateLikelihood extends Driver

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
FuzzyCalorimeterHit.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FuzzyCalorimeterHit.java	23 Oct 2011 09:50:31 -0000	1.2
+++ FuzzyCalorimeterHit.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -9,7 +9,7 @@
   * Watch out! Regular calibrations won't look at the weight and will over-count
   * the hit's energy.
   *
-  * @version $Id: FuzzyCalorimeterHit.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: FuzzyCalorimeterHit.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   */
 
 public class FuzzyCalorimeterHit

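The over-counting warning is easiest to see with a weighted hit: a weight-aware calibration sums weight times energy, while a naive one counts the full hit energy in every cluster that shares the hit. A hypothetical sketch:

    class FuzzyHitSketch {
        double rawEnergy; // energy of the underlying CalorimeterHit
        double weight;    // fraction of the hit assigned to this cluster (0..1)

        // what a weight-aware calibration should add to the cluster energy
        double weightedEnergy() {
            return weight * rawEnergy;
        }
    }
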
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
FuzzyNeutralHadronClusterEnergyCalculator.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FuzzyNeutralHadronClusterEnergyCalculator.java	23 Oct 2011 09:50:31 -0000	1.2
+++ FuzzyNeutralHadronClusterEnergyCalculator.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -14,7 +14,7 @@
   * an extension of Ron's DetailedNeutralHadronClusterEnergyCalculator
   * class.
   * 
-  * @version $Id: FuzzyNeutralHadronClusterEnergyCalculator.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: FuzzyNeutralHadronClusterEnergyCalculator.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   */
 
 public class FuzzyNeutralHadronClusterEnergyCalculator extends DetailedNeutralHadronClusterEnergyCalculator

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
FuzzyPhotonClusterEnergyCalculator.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FuzzyPhotonClusterEnergyCalculator.java	23 Oct 2011 09:50:31 -0000	1.2
+++ FuzzyPhotonClusterEnergyCalculator.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -14,7 +14,7 @@
   * an extension of Ron's PhotonClusterEnergyCalculator
   * class.
   * 
-  * @version $Id: FuzzyPhotonClusterEnergyCalculator.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: FuzzyPhotonClusterEnergyCalculator.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   */
 
 public class FuzzyPhotonClusterEnergyCalculator extends PhotonClusterEnergyCalculator

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
FuzzyQNeutralHadronClusterEnergyCalculator.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FuzzyQNeutralHadronClusterEnergyCalculator.java	23 Oct 2011 09:50:31 -0000	1.2
+++ FuzzyQNeutralHadronClusterEnergyCalculator.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -14,7 +14,7 @@
   * an extension of Ron's QNeutralHadronClusterEnergyCalculator
   * class.
   * 
-  * @version $Id: FuzzyQNeutralHadronClusterEnergyCalculator.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: FuzzyQNeutralHadronClusterEnergyCalculator.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   */
 
 public class FuzzyQNeutralHadronClusterEnergyCalculator extends QNeutralHadronClusterEnergyCalculator

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
FuzzyQPhotonClusterEnergyCalculator.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- FuzzyQPhotonClusterEnergyCalculator.java	23 Oct 2011 09:50:31 -0000	1.2
+++ FuzzyQPhotonClusterEnergyCalculator.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -13,7 +13,7 @@
   * clusters such that the total weight adds up to 1. This is
   * an extension of Ron's QPhotonClusterEnergyCalculator class.
   *
-  * @version $Id: FuzzyQPhotonClusterEnergyCalculator.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: FuzzyQPhotonClusterEnergyCalculator.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   */
 
 public class FuzzyQPhotonClusterEnergyCalculator extends QPhotonClusterEnergyCalculator

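The weight distribution used by these Fuzzy* calculators has one invariant: for a hit shared between several clusters, the per-cluster weights sum to 1. A minimal sketch of that normalization (illustrative, not the committed code):

    import java.util.*;

    class WeightNormalizationSketch {
        // scale the raw per-cluster weights of one shared hit so they total 1
        static <K> Map<K, Double> normalize(Map<K, Double> rawWeights) {
            double total = 0.0;
            for (double w : rawWeights.values()) total += w;
            Map<K, Double> normalized = new HashMap<K, Double>();
            for (Map.Entry<K, Double> e : rawWeights.entrySet()) {
                normalized.put(e.getKey(), total > 0.0 ? e.getValue() / total : 0.0);
            }
            return normalized;
        }
    }
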
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
HelixTangentMIPGeometryHandler.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- HelixTangentMIPGeometryHandler.java	23 Oct 2011 09:50:31 -0000	1.2
+++ HelixTangentMIPGeometryHandler.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -5,7 +5,7 @@
 import org.lcsim.event.util.*;
 import org.lcsim.event.*;
 import hep.physics.vec.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.cluster.util.BasicCluster;
 import org.lcsim.geometry.*;
 
@@ -20,7 +20,7 @@
  * The calculation is based on identifying the outermost hit
  * and then checking the track helix near that point.
  *
- * @version $Id: HelixTangentMIPGeometryHandler.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: HelixTangentMIPGeometryHandler.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  */
 
 public class HelixTangentMIPGeometryHandler extends MIPGeometryHandler {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
HitBookKeeper.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- HitBookKeeper.java	23 Oct 2011 09:50:31 -0000	1.2
+++ HitBookKeeper.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -24,7 +24,7 @@
  * second group might be a list of clusters plus a
  * HitMap of left-over hits.
  *
- * @version $Id: HitBookKeeper.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: HitBookKeeper.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  */
 
 public class HitBookKeeper extends Driver

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
HitFilterDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- HitFilterDriver.java	23 Oct 2011 09:50:31 -0000	1.2
+++ HitFilterDriver.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -10,7 +10,7 @@
  * This class takes in a List<CalorimeterHit> and
 * filters it according to the user's filter options
  *
- * @version $Id: HitFilterDriver.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: HitFilterDriver.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  */
 
 public class HitFilterDriver extends Driver

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
LayerBasedMIPGeometryHandler.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- LayerBasedMIPGeometryHandler.java	23 Oct 2011 09:50:31 -0000	1.2
+++ LayerBasedMIPGeometryHandler.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -3,7 +3,7 @@
 import java.util.*; 
 import org.lcsim.event.*;
 import hep.physics.vec.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.cluster.util.BasicCluster;
 import org.lcsim.geometry.*;
 import org.lcsim.recon.util.CalorimeterInformation;
@@ -23,7 +23,7 @@
  * from the IP, then finding the hits in the outermost layer
  * of that subdetector.
  *
- * @version $Id: LayerBasedMIPGeometryHandler.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: LayerBasedMIPGeometryHandler.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  */
 
 public class LayerBasedMIPGeometryHandler extends MIPGeometryHandler {

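The "outermost layer" lookup can use the same CalorimeterInformation API that the diffs above migrate to. A minimal sketch for the hadronic barrel (the zero-based layer-counting convention is an assumption):

    import org.lcsim.recon.util.CalorimeterInformation;
    import org.lcsim.geometry.Calorimeter.CalorimeterType;

    class OutermostLayerSketch {
        // true if the hit sits in the last layer of the HCAL barrel,
        // assuming layers are counted from 0
        static boolean isInLastLayer(int hitLayer) {
            CalorimeterInformation ci = CalorimeterInformation.instance();
            int nLayers = ci.getNLayers(CalorimeterType.HAD_BARREL);
            return hitLayer == nLayers - 1;
        }
    }
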
lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
MIPGeometryHandler.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- MIPGeometryHandler.java	23 Oct 2011 09:50:31 -0000	1.2
+++ MIPGeometryHandler.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -10,7 +10,7 @@
  * that point. Implementation is delegated to
  * subclasses.
  *
- * @version $Id: MIPGeometryHandler.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: MIPGeometryHandler.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  */
 
 public abstract class MIPGeometryHandler {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
MergeClustersCrossingSubDetectorBoundaries.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- MergeClustersCrossingSubDetectorBoundaries.java	23 Oct 2011 09:50:31 -0000	1.2
+++ MergeClustersCrossingSubDetectorBoundaries.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -14,7 +14,7 @@
 * An algorithm to merge clusters that get split when crossing 
  * subdetector boundaries.
  *
- * @version $Id: MergeClustersCrossingSubDetectorBoundaries.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: MergeClustersCrossingSubDetectorBoundaries.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  * @author Remi Zaidan <[log in to unmask]>
  */
 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
NewShowerBuilder.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- NewShowerBuilder.java	23 Oct 2011 09:50:31 -0000	1.1
+++ NewShowerBuilder.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -1,16 +1,17 @@
 package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural;
 
 import java.util.*;
+import java.io.IOException;
 import hep.physics.vec.*;
 import org.lcsim.event.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
-import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.debug.*;
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.debug.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 import org.lcsim.util.*;
 import org.lcsim.util.lcio.LCIOConstants;
@@ -20,6 +21,9 @@
 
 import org.lcsim.math.probability.Erf;
 
+
+
+
 public class NewShowerBuilder implements IShowerBuilder
 {
     protected PropertyContainer m_properties;
@@ -28,8 +32,8 @@
     protected LinkDecisions.LinkDecision m_linkDecision;
     protected LinkDecisions.LinkDecision m_linkBreakDecision;
     protected LinkDecisions.LinkDecision m_linkDecisionPhaseSpace;
-    protected LinkDecisions.LinkDecision m_linkDecisionPrimary; // alpha
-    protected LinkDecisions.LinkDecision m_linkDecisionSecondary; // alpha
+    protected LinkDecisions.LinkDecision m_linkDecisionPrimary;
+    protected LinkDecisions.LinkDecision m_linkDecisionSecondary;
 
     protected ILikelihoodEvaluator m_primaryShowerEval = null;
     protected ILikelihoodEvaluator m_showerToShowerEval = null;
@@ -41,10 +45,10 @@
     protected ClusterEnergyCalculator m_chargedCalib;
     protected ClusterEnergyCalculator m_neutralCalib;
 
-    protected DebugUtils m_debugUtils;
-    protected LinkQualityChecker m_LQChecker;
 
-    protected ILikelihoodEvaluator m_eval;
+    DebugUtils m_debugUtils; //gamma
+    LinkQualityChecker m_LQChecker; //gamma
+
 
     /**
      * Constructor
@@ -61,21 +65,6 @@
 	m_neutralCalib = neutralCalib;
         m_extrapolator = extrapolator;
 
-	CalorimeterInformation ci = CalorimeterInformation.instance();
-
-	//---- Configure MC-truth checking
-	String mcListName = m_properties.getKey("MCListName");
-	String EcalDigiHitMapName = m_properties.getKey("EcalDigiHitMapName");
-	String HcalDigiHitMapName = m_properties.getKey("HcalDigiHitMapName");
-
-        m_debugUtils = new DebugUtils();
-        m_debugUtils.setMCListName(mcListName);
-        m_debugUtils.setEcalDigiHitMapName(EcalDigiHitMapName);
-        m_debugUtils.setHcalDigiHitMapName(HcalDigiHitMapName);
-        m_debugUtils.setEnergyBased(true);
-
-	m_LQChecker = new DominantParticleBasedLQChecker(m_debugUtils);
-
         if(!m_properties.getFlag("makeShowerLikelihoodPDF"))
 	    {
 		m_primaryShowerEval = new LikelihoodEvaluatorWrapper( m_properties.getKey("showerLikelihoodPath") );
@@ -101,29 +90,25 @@
 	}else{
 	    // score & outgoing
 	    m_linkBreakDecision = new LinkDecisions.ScoreBasedLinkDecision(m_properties.getCut("scoreCut"));
+	    m_linkDecision = new LinkDecisions.OutgoingLinkDecision();
 	    m_linkDecisionPhaseSpace = new LinkDecisions.AngleAtCenterBasedLinkDecision( m_properties.getCut("angleForLinkingCut") );
-	    List<LinkDecisions.LinkDecision> decisions = new Vector<LinkDecisions.LinkDecision>();
-	    decisions.add(new LinkDecisions.OutgoingLinkDecision());
-	    List<String> ecalNames = new Vector<String>();
-	    ecalNames.add(ci.getName(CalorimeterType.EM_BARREL));
-	    ecalNames.add(ci.getName(CalorimeterType.EM_ENDCAP));
-	    LinkDecisions.LinkDecision ecalLinkDecision = new LinkDecisions.SubdetectorBasedLinkDecision(ecalNames);
-	    List<String> hcalNames = new Vector<String>();
-	    hcalNames.add(ci.getName(CalorimeterType.HAD_BARREL));
-	    hcalNames.add(ci.getName(CalorimeterType.HAD_ENDCAP));
-	    hcalNames.add(ci.getName(CalorimeterType.MUON_ENDCAP));
-	    LinkDecisions.LinkDecision hcalLinkDecision = new LinkDecisions.SubdetectorBasedLinkDecision(hcalNames);
-	    List<LinkDecisions.LinkDecision> subDetectorDecisions = new Vector<LinkDecisions.LinkDecision>();
-	    subDetectorDecisions.add(ecalLinkDecision);
-	    subDetectorDecisions.add(hcalLinkDecision);
-	    //	    decisions.add(new LinkDecisions.OrLinkDecision(subDetectorDecisions));
-	    m_linkDecision = new LinkDecisions.AndLinkDecision(decisions);
 	}
 
-	m_eval = new LikelihoodEvaluatorWrapper(m_properties.getKey("LikelihoodPath"));
-
-	// alpha
 	m_linkDecisionPrimary = new LinkDecisions.LikelihoodBasedLinkDecision( m_primaryShowerEval , m_properties.getCut( "primaryShowerScoreCut" ) , "PrimaryShower" );
+
+
+
+
+	m_debugUtils = new DebugUtils(); //gamma
+	m_debugUtils.setMCListName( m_properties.getKey( "MCListName" ) ); //gamma
+	m_debugUtils.setEcalDigiHitMapName( m_properties.getKey( "EcalDigiHitMapName" ) ); //gamma
+	m_debugUtils.setHcalDigiHitMapName( m_properties.getKey( "HcalDigiHitMapName" ) ); //gamma
+	m_debugUtils.setEnergyBased( true ); //gamma
+
+	m_LQChecker = new DominantParticleBasedLQChecker(m_debugUtils); //gamma
+
+
+
     }
 
 
@@ -132,15 +117,21 @@
      */
     public void buildChargedHadronShowers(){
 
-	m_debugUtils.setEventInfo(m_bookKeeper.getEvent());
-	List<StructuralLikelihoodQuantity> quantities = m_eval.getLikelihoodQuantities();
-	for(StructuralLikelihoodQuantity quantity : quantities){
-	    quantity.setEventInfo(m_bookKeeper.getEvent());
-	}
+	m_debugUtils.setEventInfo( m_bookKeeper.getEvent() );
 
 	// do a first iteration: build skeletons
 	m_showerContainer = new ShowerContainer();
 	chargedHadronsFirstIteration();
+	
+	// create the shower container
+	m_showerContainer = new ShowerContainer();
+
+	// redo the first iteration, this time using the track direction
+	chargedHadronsFirstIteration(true);
+
+	// redo the first iteration without the track direction
+	m_showerContainer = new ShowerContainer();
+	chargedHadronsFirstIteration(false);
 
 	if(!m_properties.getFlag("makeShowerLikelihoodPDF"))
 	    {
@@ -207,20 +198,219 @@
 
 	List<SharedClusterGroup> allSharedClusters = m_bookKeeper.getAllSharedClusters();
 
-	if(m_properties.getFlag("debug")){
+	if(m_properties.getFlag("debug") || true){
 	    System.out.println("ShowerBuilding: event has "+tracks.size()+" tracks and "+m_bookKeeper.getClusterList("Linkable Clusters").size()+" linkable clusters");
 	}
 
+
 	for( Track track : tracks )
 	    {
+		// seed
+		Cluster seed = tracksMatchedToClusters.get( track );
+		if( seed == null )
+		    {
+			throw new AssertionError( "Book keeping error" );
+		    }
+
+		Shower shower = m_showerContainer.createShower( m_chargedCalib , allSharedClusters , track , seed );
+
+		//gamma
+		shower.declareFlag( "isPrimary" , true );
+	    }
+
+	Set<Shower> showers = m_showerContainer.getShowers();
+
+	if( m_properties.getFlag( "debug" ) )
+	    {
+		System.out.println( "ShowerBuilding: created " + showers.size() + " showers" );
+	    }
+
+	si = 0;
+	for( Shower shower : showers )
+	    {
+		Set<Cluster> seeds =  shower.getSeeds();
+
+		/////
+
+		/////
+
+		if( m_properties.getFlag( "debug" ) )
+		    {
+			System.out.println( "ShowerBuilding: Shower " + si + " has " + seeds.size() + " seeds and " + shower.size() + " assigned clusters" );
+		    }
+	        ss = 0;
+
+		
+		for( Cluster seed : seeds )
+		    {
+			if( m_properties.getFlag( "debug" ) )
+			    {
+				System.out.println( "ShowerBuilding: assiging clusters to seed " + ss + " which has " + seed.getCalorimeterHits().size() + " hits" );
+			    }
+			assignLinksToCluster( seed , shower , m_showerContainer );
+			ss++;
+		    }
+	       		
+		///////
+
+		boolean toSolve = false;
+		
+		double residual = ( shower.scalarMomentum() - shower.coreEnergy() ) / shower.estimatedEnergyUncertainty();
+		if( residual > 5. )
+		    {
+			if( shower.getSeeds().size() == shower.getShowerComponents().size() ) toSolve = true;
+		    }
+			
+		toSolve = false;
+
+		if( toSolve )
+		    {
+			Set<Track> trks = shower.getTracks();
+			
+			for( Track trk : trks )
+			    {
+
+
+
+				for( Cluster clus : m_bookKeeper.getClusterList( "Linkable Clusters" ) )
+				    {
+					boolean isOk = false;
+					
+					try
+					    {
+						isOk = m_LQChecker.accept( trk , clus );
+					    }
+					catch( LinkDecisions.DecisionCannotBeMadeException e )
+					    {
+						isOk = false;
+					    }
+					
+					if( isOk )
+					    {
+						if( !shower.contains( clus ) )
+						    {
+							m_showerContainer.addClusterToShower( shower, clus );
+						    }
+					    }
+
+
+
+					/*
+					MCParticle dominantParticle = m_energyBasedDebugUtils.quoteDominantParticle( clus );
+					MCParticle p1 = m_energyBasedDebugUtils.backTrace( dominantParticle );
+
+					List<MCParticle> mcList = m_energyBasedDebugUtils.getMCParticle( trk );
+					
+					for( MCParticle mc : mcList )
+					    {
+						MCParticle p2 = m_energyBasedDebugUtils.backTrace( mc );
+						if( p1 == p2 && !isOk ) System.out.println( ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 1" );
+						if( p1 != p2 && isOk )  System.out.println( ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> 2" );
+					    }
+					*/
+					
+
+				    }
+			    }
+		    }
+
+		///////
+		/*
+		if( seeds.size() == shower.getShowerComponents().size() )
+		    {
+			double residual = ( shower.scalarMomentum() - shower.coreEnergy() ) / shower.estimatedEnergyUncertainty();
+			if( residual > 5. )
+			    {
+				Map< Cluster , List<ScoredLink> > potentialLinks = m_bookKeeper.getPotentialLinks();
+				for( Cluster seed : seeds )
+				    {
+					List<ScoredLink> listScoredLinks = potentialLinks.get( seed );
+					if( listScoredLinks != null )
+					    {
+						if( listScoredLinks.size() > 0 )
+						    {
+							Cluster clusterWithStrongestLink = listScoredLinks.get( 0 ).counterpart( seed );
+							m_showerContainer.addClusterToShower( shower , clusterWithStrongestLink );
+							assignLinksToCluster( clusterWithStrongestLink , shower , m_showerContainer );
+						    }
+					    }
+				    }
+			    }
+		    }
+		*/
+		///////
+		
+
+		if(m_properties.getFlag("debug")){
+		    System.out.println("ShowerBuilding: After assigning clusters to seeds, shower "+si+" has "+shower.size()+" assigned clusters");
+		}
+		si++;
+	    }
+
+	if(m_properties.getFlag("keepShowerContainersAtEachStep")) {
+	    m_bookKeeper.addShowerContainer("charged showers after first iteration fixing failures", m_showerContainer.clone());
+	}
+    }
+
+
+
+
+    protected void chargedHadronsFirstIteration(boolean useTrack)
+    {
+
+	MiscUtilities.extrapolator = m_bookKeeper.getExtrapolator();
+	MiscUtilities.useTrackDirection = useTrack;
+	MiscUtilities.clustersMatchedToTracks = m_bookKeeper.getClustersMatchedToTracks();
+
+	CalorimeterInformation ci = CalorimeterInformation.instance();
 
+	Map<Track,Cluster> tracksMatchedToClusters = m_bookKeeper.getTracksMatchedToClusters();
+	Set<Track> tracks = tracksMatchedToClusters.keySet();
+
+	List<SharedClusterGroup> allSharedClusters = m_bookKeeper.getAllSharedClusters();
+
+	if(m_properties.getFlag("debug")){
+	    System.out.println("ShowerBuilding: event has "+tracks.size()+" tracks and "+m_bookKeeper.getClusterList("Linkable Clusters").size()+" linkable clusters");
+	}
+
+	for( Track track : tracks )
+	    {
 		// seed
+		
 		Cluster seed = tracksMatchedToClusters.get( track );
 		if(seed == null){
 		    throw new AssertionError("Book keeping error");
 		}
 
 		Shower shower = m_showerContainer.createShower(m_chargedCalib, allSharedClusters, track, seed);
+		
+
+		/*
+		Cluster seed = null;
+		Shower shower = null;
+
+		for( Cluster clus : m_bookKeeper.getClusterList( "Linkable Clusters Excluding Photons" ) )
+		    {
+			boolean isOk = false;
+			
+			try
+			    {
+				isOk = m_LQChecker.accept( track , clus );
+			    }
+			catch( LinkDecisions.DecisionCannotBeMadeException e )
+			    {
+				isOk = false;
+			    }
+			
+			if( isOk )
+			    {
+				seed = clus;
+				shower = m_showerContainer.createShower(m_chargedCalib, allSharedClusters, track, seed);
+				break;
+			    }
+		    }
+		*/
+
 
 		//gamma
 		shower.declareFlag( "isPrimary" , true );
@@ -231,108 +421,305 @@
 	    System.out.println("ShowerBuilding: created "+showers.size()+" showers");
 	}
 
-// 	si = 0;
-// 	for( Shower shower : showers )
-// 	    {
-// 		Set<Cluster> seeds =  shower.getSeeds();
-// 		if(m_properties.getFlag("debug")){
-// 		    System.out.println("ShowerBuilding: Shower "+si+" has "+seeds.size()+" seeds and "+shower.size()+" assigned clusters");
-// 		}
-// 	        ss = 0;
-// 		for( Cluster seed : seeds )
-// 		    {
-// 			if(m_properties.getFlag("debug")){
-// 			    System.out.println("ShowerBuilding: assiging clusters to seed "+ss+" which has "+seed.getCalorimeterHits().size()+" hits");
-// 			}
-// 			assignLinksToCluster(seed, shower, m_showerContainer);
-// 			ss++;
-// 		    }
-// 		if(m_properties.getFlag("debug")){
-// 		    System.out.println("ShowerBuilding: After assigning clusters to seeds, shower "+si+" has "+shower.size()+" assigned clusters");
-// 		}
-// 		si++;
-// 	    }
-
-	Set<Cluster> linkableClusters = new HashSet<Cluster>();
-	linkableClusters.addAll(m_bookKeeper.getClusterList("Linkable Clusters Excluding Photons"));
-	Set<Cluster> linkedClusters = new HashSet<Cluster>();
-	linkedClusters.addAll(m_showerContainer.getUsedClusters());
-	Set<Cluster> unlinkedClusters = new HashSet<Cluster>();
-	unlinkedClusters.addAll(linkableClusters);
-	unlinkedClusters.removeAll(linkedClusters);
-	Map<Cluster, List<ScoredLink>> potentialLinks = m_bookKeeper.getPotentialLinks();
-
-	for(Cluster seedCandidate : unlinkedClusters){
-
-	    boolean foundLink = false;
-	    for(Cluster base : linkableClusters){
-		List<ScoredLink> linksForBase = potentialLinks.get(base);
-		if(linksForBase == null) continue;
-		for(ScoredLink link : linksForBase){
-		    if(link.counterpart(base) != seedCandidate) continue;
-		    try{
-			if(!m_linkDecision.accept(link)){
-			    continue;
-			}
-			if(!m_linkBreakDecision.accept(link)){
-			    break;
-			}
-		    }catch(LinkDecisions.DecisionCannotBeMadeException e){continue;}
-		    foundLink = true;
-		    break;
+	si = 0;
+
+	Collection<Cluster> mips = m_bookKeeper.getClusterList("Mips");
+	Collection<Cluster> clumps = m_bookKeeper.getClusterList("Clumps");
+	Collection<Cluster> leftoverHitClusters = m_bookKeeper.getClusterList( "Leftovers" );
+
+	Set<Shower> showers_ = new HashSet<Shower>();
+	showers_.addAll( showers );
+
+	for( Shower shower : showers_ )
+	    {
+		Set<Cluster> seeds =  shower.getSeeds();
+
+		Set<Cluster> seeds2 = new HashSet<Cluster>();
+		seeds2.addAll( seeds ); // gammaphi
+
+		Collection<Cluster> linkableClusters = m_bookKeeper.getClusterList( "Linkable Clusters Excluding Photons" );
+		Set<Track> tracks2 = shower.getTracks();
+
+		boolean isSpineCase = false;
+
+		/*
+                if( trk instanceof MultipleTrackTrack ) // FixMe!!! handle the case where the directions of the sub-tracks differ strongly.
+		    {
+
+
+		int ii = 0;
+		for( Track subTrack : trk.getTracks() )
+		    {
+			HelixExtrapolationResult result = m_extrapolator.performExtrapolation( subTrack );
+			if( result == null ) { System.out.println( "> + > + >   Extrapolatin of the subTrack failed." ); continue; }
+
+			result.extendToECALEndcapLayer( 0 );
+			Hep3Vector vTemp = result.getInterceptPoint();
+			Hep3Vector vTempDir = result.getTangent();
+
+			if( vTemp == null || vTempDir == null )
+			    {
+				result.extendToECALBarrelLayer( 0 );
+				vTemp = result.getInterceptPoint();
+				vTempDir = result.getTangent();
+			    }
+
+			if( vTemp == null || vTempDir == null ) { System.out.println( "> + > + >   Extrapolatin of the subTrack failed 2." ); continue; }
+
+			positionOfTrack  = VecOp.add( positionOfTrack  , vTemp );
+			directionOfTrack = VecOp.add( directionOfTrack , VecOp.unit( vTempDir ) );
+			momentumOfTrack  = VecOp.add( momentumOfTrack  , new BasicHep3Vector( subTrack.getMomentum() ) );
+
+			HelixExtrapolationResult result2 = m_extrapolator.performExtrapolation( subTrack );
+			listResult.add( result2 );
+
+			ii++;
+		    }
+
+		double scale = 1. / (double)ii;
+		positionOfTrack  = VecOp.mult( scale , positionOfTrack );
+		directionOfTrack = VecOp.unit( directionOfTrack );
+		*/
+
+
+
+
+		if( tracks2.size() == 1 )
+		    {
+			isSpineCase = true;
+
+			for( Track track : tracks2 )
+			    {
+				List<TrackerHit> listHits = track.getTrackerHits();
+				if( listHits.size() == 0 )
+				    {
+					isSpineCase = false;
+					break;
+				    }
+
+				Hep3Vector positionOfTrack = null;
+				Hep3Vector directionOfTrack = null;
+
+				HelixExtrapolationResult result = m_extrapolator.performExtrapolation( track );
+				if( result == null )
+				    {
+					isSpineCase = false;
+					break;
+				    }
+
+				result.extendToECALEndcapLayer( 0 );
+				positionOfTrack  = result.getInterceptPoint();
+				directionOfTrack = result.getTangent();
+
+				if( positionOfTrack == null || directionOfTrack == null )
+				    {
+					result.extendToECALBarrelLayer( 0 );
+					positionOfTrack  = result.getInterceptPoint();
+					directionOfTrack = result.getTangent();
+				    }
+
+				if( positionOfTrack == null || directionOfTrack == null )
+				    {
+					isSpineCase = false;
+					break;
+				    }
+
+				for( Cluster clus : linkableClusters )
+				    {
+					Hep3Vector positionOfClus = new BasicHep3Vector( clus.getPosition() );
+
+					Hep3Vector delta = VecOp.sub( positionOfClus , positionOfTrack );
+
+					double cosA = VecOp.dot( delta , directionOfTrack ) / ( delta.magnitude() * directionOfTrack.magnitude() );
+
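+					// rr = perpendicular distance from the cluster to the extrapolated track line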
+					double rr = delta.magnitude() * Math.sqrt( 1. - ( cosA * cosA ) );
+
+					if( rr < 3. )
+					    {
+						seeds2.add( clus );
+						if( !shower.contains( clus ) ) m_showerContainer.addClusterToShower( shower , clus );
+					    }
+				    }
+			    }
+		    }
+
+
+
+
+		if(m_properties.getFlag("debug")){
+		    System.out.println("ShowerBuilding: Shower "+si+" has "+seeds.size()+" seeds and "+shower.size()+" assigned clusters");
 		}
-		if(foundLink) break;
-	    }
-	    if(foundLink) continue;
+	        ss = 0;
+		
+		if( isSpineCase )
+		    {
+			for( Cluster seed : seeds2 )
+			    {
+				if(m_properties.getFlag("debug")){
+				    System.out.println("ShowerBuilding: assiging clusters to seed "+ss+" which has "+seed.getCalorimeterHits().size()+" hits");
+				}
+				assignLinksToCluster(seed, shower, m_showerContainer);
+				ss++;
+			    }
+		    }
+		else
+		    {
+			for( Cluster seed : seeds )
+			    {
+				if(m_properties.getFlag("debug")){
+				    System.out.println("ShowerBuilding: assiging clusters to seed "+ss+" which has "+seed.getCalorimeterHits().size()+" hits");
+				}
+				assignLinksToCluster(seed, shower, m_showerContainer);
+				ss++;
+			    }
+		    }
+		
 
-	    Shower shhhh = m_showerContainer.createShower(m_neutralCalib, allSharedClusters, seedCandidate);
-	    shhhh.declareFlag("isPrimary", false);
-	}
 
-	while(unlinkedClusters.size()!=0){
 
-	    linkedClusters.addAll(m_showerContainer.getUsedClusters());
-	    unlinkedClusters.removeAll(linkedClusters);
+		// alpha
+		/*
+		Set<Track> tracks_ = shower.getTracks();
 
-	    ScoredLink bestLink = null;
-	    for(Cluster base : linkedClusters){
-		List<ScoredLink> linksForCluster = potentialLinks.get(base);
-		if(linksForCluster != null){
-		    for(ScoredLink link : linksForCluster){
-			Cluster target = link.counterpart(base);
-			if(!unlinkedClusters.contains(target)) continue;
-			try{
-			    if(!m_linkDecision.accept(link)){
-				continue;
+		double en_ = 100.;
+
+		//		for( Track track : tracks )
+		{
+			en_ = shower.scalarMomentum() - shower.coreEnergy();
+			if( en_ < 0. ) en_ = -1. * en_ ;
+		    }
+
+		if( en_ > 3. ) m_showerContainer.removeShower( shower );
+		*/
+		//////////
+
+		boolean toSolve = false;
+		
+		double residual = ( shower.scalarMomentum() - shower.coreEnergy() ) / shower.estimatedEnergyUncertainty();
+		if( residual > 5. )
+		    {
+			if( shower.getSeeds().size() == shower.getShowerComponents().size() ) toSolve = true;
+		    }
+		
+		toSolve = false;
+
+		if( toSolve )
+		    {
+			Set<Track> trks = shower.getTracks();
+			
+			for( Track trk : trks )
+			    {
+				for( Cluster clus : m_bookKeeper.getClusterList( "Linkable Clusters Excluding Photons" ) )
+				    {
+					boolean isOk = false;
+					
+					try
+					    {
+						isOk = m_LQChecker.accept( trk , clus );
+					    }
+					catch( LinkDecisions.DecisionCannotBeMadeException e )
+					    {
+						isOk = false;
+					    }
+					
+					if( isOk )
+					    {
+						if( !shower.contains( clus ) )
+						    {
+							m_showerContainer.addClusterToShower( shower, clus );
+						    }
+					    }
+				    }
 			    }
-			    if(!m_linkBreakDecision.accept(link)){
-				break;
+		    }
+
+
+		//////////
+		/*
+		Set<Track> trks = shower.getTracks();
+			
+		for( Track trk : trks )
+		    {
+			Cluster seed_ = shower.getSeed( trk );
+			
+			if( leftoverHitClusters.contains( seed_ ) && clumps.contains( seed_ ) )// &&  seed_.getCalorimeterHits().size() >= 6 )
+			    {
+				for( Cluster clus : m_bookKeeper.getClusterList( "Linkable Clusters Excluding Photons" ) )
+				    {
+					boolean isOk = false;
+					
+					try
+					    {
+						isOk = m_LQChecker.accept( trk , clus );
+					    }
+					catch( LinkDecisions.DecisionCannotBeMadeException e )
+					    {
+						isOk = false;
+					    }
+					
+					if( isOk )
+					    {
+						if( !shower.contains( clus ) )
+						    {
+							m_showerContainer.addClusterToShower( shower, clus );
+						    }
+					    }
+				    }
 			    }
-			}catch(LinkDecisions.DecisionCannotBeMadeException e){continue;}
-			if(bestLink == null || link.score() > bestLink.score()){
-			    bestLink = link;
-			}
 		    }
+		*/
+
+		//////////
+
+		/*
+		Set<Track> trks = shower.getTracks();
+			
+		for( Track trk : trks )
+		    {
+			for( Cluster clus : m_bookKeeper.getClusterList( "Linkable Clusters Excluding Photons" ) )
+			    {
+				boolean isOk = false;
+				
+				try
+				    {
+					isOk = m_LQChecker.accept( trk , clus );
+				    }
+				catch( LinkDecisions.DecisionCannotBeMadeException e )
+				    {
+					isOk = false;
+				    }
+				
+				if( isOk )
+				    {
+					if( !shower.contains( clus ) )
+					    {
+						m_showerContainer.addClusterToShower( shower, clus );
+					    }
+				    }
+			    }
+		    }
+		*/
+
+		//////////
+
+
+		if(m_properties.getFlag("debug")){
+		    System.out.println("ShowerBuilding: After assigning clusters to seeds, shower "+si+" has "+shower.size()+" assigned clusters");
 		}
+		si++;
 	    }
-	    if(bestLink != null){
-		Cluster base = bestLink.getClusters()[0];
-		Cluster target = bestLink.getClusters()[1];
-		Set<Shower> baseShowers = m_showerContainer.getShowers(base);
-		for(Shower shower : baseShowers){
-		    m_showerContainer.addClusterToShower(shower, target);
-		}
-		//		debugPrintLink(bestLink, baseShowers);
-	    }else{
-		break;
-	    }
-	}
 
 	if(m_properties.getFlag("keepShowerContainersAtEachStep")) {
-	    m_bookKeeper.addShowerContainer("showers after first iteration", m_showerContainer.clone());
+	    m_bookKeeper.addShowerContainer("showers after first iteration"+(useTrack?" using track":""), m_showerContainer.clone());
 	}
+
+	MiscUtilities.useTrackDirection = false;
     }
 
+
+
+
+
     /**
      * second pass on charged shower building:
      *  - a first pass on neutral shower building has already been done at this step.
@@ -345,45 +732,43 @@
 	    chargedClumpsCorrectionInEcal();
 	}
 
-	//debugPrintShowerStatus("DebugFirstIteration");
-
 	/// decided to do a first pass on neutral hadrons here to solve charged/neutral ambiguities
-        neutralHadronsFirstIteration();
-	//	perfectNeutralHadronsFirstIteration();
+	neutralHadronsFirstIteration();
+	//perfectNeutralHadronsFirstIteration();
 
-	// now come back and detect patological cases of E >> p
-	// these should be either combined with a nearby track or can be flagged as stealing neutral energy
-	//	applyOverrides();
+        /// correction of the charged hadrons in light of the photons.
+	//chargedHadronsPhotonsCorrection();
 
-// 	debugPrintShowerStatus("DebugOverrides");
 
-// 	if(m_properties.getFlag("doPerfectSecondIteration")){
-// 	    perfectSecondIteration();
-// 	}else{
+	// perfect second iteration
+	if( m_properties.getFlag( "doPerfectSecondIteration" ) )
+	    {
+		perfectSecondIteration();
+	    }
+	else
+	    {
+		/// find primary neutrals
+		if( m_properties.getFlag( "makeShowerLikelihoodPDF" ) ) {
+		    return;
+		}
+		flagPrimaryNeutrals();
+		
+		/// resolve all overlaps...
+		//	resolveOverlaps();
+		
+		/// now look for detached charged energy and assign them to the correct track
+		if(m_properties.getFlag("makeShowerToShowerLikelihoodPDF")){
+		    return;
+		}
+		oldLinkSecondaryNeutrals();
+	    }
 
-// 	    /// correction of the charged hadrons looking to the photons.
-// 	    //chargedHadronsPhotonsCorrection();
-	    
-	/// find primary neutrals
-	if( m_properties.getFlag( "makeShowerLikelihoodPDF" ) ) {
-	    return;
-	}
-	flagPrimaryNeutrals();
-	
-// 	    /// resolve all overlaps...
-// 	    //	resolveOverlaps();
-	    
-// 	    /// now look for detached charged energy and assign them to the correct track
-	if(m_properties.getFlag("makeShowerToShowerLikelihoodPDF")){
-	    return;
-	}
-	oldLinkSecondaryNeutrals();
-// 	}
+	/// now check overlaps between neutral and charged showers and resolve
+	resolveChargedNeutralOverlaps();
 
-// 	/// now check overlaps between neutral and charged showers and resolve
-	    // resolveChargedNeutralOverlaps();
+	// now do third iteration
+	thirdIteration();
 
-    //debugPrintShowerStatus("DebugSecondIteration");
     }
 
     protected void chargedClumpsCorrectionInEcal()
@@ -455,58 +840,58 @@
 
 	    shower.declareFlag( "isPrimary" , false );
 
-	    assignLinksToCluster(seed, shower, m_showerContainer);
+	    assignLinksToCluster(seed, shower, m_showerContainer, notYetLinkedClusters);
 
 	    notYetLinkedClusters.removeAll(m_showerContainer.getUsedClusters());
 	}
 
-         if( m_properties.getFlag( "keepShowerContainersAtEachStep" ) ||
- 	    m_properties.getFlag("makeShowerLikelihoodPDF") ) {
- 	    m_bookKeeper.addShowerContainer( "showers after neutral showers first iterations" , m_showerContainer.clone() );
- 	}
+        if( m_properties.getFlag( "keepShowerContainersAtEachStep" ) ||
+	    m_properties.getFlag("makeShowerLikelihoodPDF") ) {
+	    m_bookKeeper.addShowerContainer( "showers after neutral showers first iterations" , m_showerContainer.clone() );
+	}
     }
 
     protected void perfectNeutralHadronsFirstIteration() {
 
-	List<SharedClusterGroup> allSharedClusters = m_bookKeeper.getAllSharedClusters();
-
-	/// seeds are clusters not yet used and close to the edge of the Ecal...
+        List<SharedClusterGroup> allSharedClusters = m_bookKeeper.getAllSharedClusters();
 
-	/// First create a sorted list of such clusters were the order goes inside-out from the center of the detector...
-	List<Cluster> notYetLinkedClusters = new Vector<Cluster>();
-	notYetLinkedClusters.addAll(m_bookKeeper.getClusterList("Linkable Clusters Excluding Photons"));
-	notYetLinkedClusters.removeAll(m_showerContainer.getUsedClusters());
-	Collections.sort(notYetLinkedClusters, new PFAUtil.InsideOutNegativePoleCoparator());
+        /// seeds are clusters not yet used and close to the edge of the Ecal...                                                                                                                                                                                         
 
-	/// Loop on the not yet used clusters:
-	//   - take the closest one to the center of the detector and treat it as a seed
-	//   - build a neutral shower for that seed
-	//   - remove all used clusters from the not yet used clusters list
-	//   - stop when the list is empty
-	while(notYetLinkedClusters.size() > 0){
-	    Cluster seed = notYetLinkedClusters.get(0);
-	    if(seed == null){
-		throw new AssertionError("Book keeping error");
-	    }
+        /// First create a sorted list of such clusters where the order goes inside-out from the center of the detector...
+        List<Cluster> notYetLinkedClusters = new Vector<Cluster>();
+        notYetLinkedClusters.addAll(m_bookKeeper.getClusterList("Linkable Clusters Excluding Photons"));
+        notYetLinkedClusters.removeAll(m_showerContainer.getUsedClusters());
+        Collections.sort(notYetLinkedClusters, new PFAUtil.InsideOutNegativePoleCoparator());
+
+        /// Loop on the not yet used clusters:                                                                                                                                                                                                                           
+        //   - take the closest one to the center of the detector and treat it as a seed                                                                                                                                                                                 
+        //   - build a neutral shower for that seed                                                                                                                                                                                                                      
+        //   - remove all used clusters from the not yet used clusters list                                                                                                                                                                                              
+        //   - stop when the list is empty                                                                                                                                                                                                                               
+        while(notYetLinkedClusters.size() > 0){
+            Cluster seed = notYetLinkedClusters.get(0);
+            if(seed == null){
+                throw new AssertionError("Book keeping error");
+            }
 
-	    Shower shower = m_showerContainer.createShower(m_neutralCalib, allSharedClusters, seed);
+            Shower shower = m_showerContainer.createShower(m_neutralCalib, allSharedClusters, seed);
 
-	    shower.declareFlag( "isPrimary" , false );
+            shower.declareFlag( "isPrimary" , false );
 
-	    for(Cluster cluster : notYetLinkedClusters){
-		if(cluster == seed) continue;
-		if(shower.contains(cluster)) continue;
-		try {
-		    if(m_LQChecker.accept(cluster, seed)){
-			m_showerContainer.addClusterToShower(shower, cluster);
-		    }
-		}catch(LinkDecisions.DecisionCannotBeMadeException e){
-		    continue;
-		}
-	    }
+            for(Cluster cluster : notYetLinkedClusters){
+                if(cluster == seed) continue;
+                if(shower.contains(cluster)) continue;
+                try {
+                    if(m_LQChecker.accept(cluster, seed)){
+                        m_showerContainer.addClusterToShower(shower, cluster);
+                    }
+                }catch(LinkDecisions.DecisionCannotBeMadeException e){
+                    continue;
+                }
+            }
 
-	    notYetLinkedClusters.removeAll(m_showerContainer.getUsedClusters());
-	}
+            notYetLinkedClusters.removeAll(m_showerContainer.getUsedClusters());
+        }
     }
 
     protected void chargedHadronsPhotonsCorrection()
@@ -598,259 +983,6 @@
             }
     }
 
-    protected void applyOverrides(){
-
-        Set<Shower> allShowers = new HashSet<Shower>();
-	allShowers.addAll(m_showerContainer.getShowers());
-	List<SharedClusterGroup> allSharedClusters = m_bookKeeper.getAllSharedClusters();
-
-	Set<Set<Shower>> showerGroups = m_showerContainer.getGroupsOfOverlappingShowers();
-	Map<Shower, Set<Shower>> showerGroupMap = new HashMap<Shower, Set<Shower>>();
-	for(Set<Shower> showerGroup : showerGroups){
-	    Set<Shower> copyShowerGroup = new HashSet<Shower>();
-	    copyShowerGroup.addAll(showerGroup);
-	    for(Shower shower : copyShowerGroup){
-		showerGroupMap.put(shower, copyShowerGroup);
-	    }
-	}
-
-	for(Shower shower : allShowers){
-
-	    if(shower.isNeutral()) continue;
-	    if(!m_showerContainer.contains(shower)) continue;
-
-	    double energy = shower.realEnergy();
-	    double momentum = shower.scalarMomentum();
-	    Hep3Vector vecMomentum = shower.momentum();
-	    double sigma = shower.estimatedEnergyUncertainty();
-
-	    double residual = ( energy - momentum ) / sigma;
-
-	    double energySharedWithCharged = 0;
-	    double energySharedWithNeutral = 0;
-	    Set<Shower> showerGroup = showerGroupMap.get(shower);
-	    if(showerGroup != null){
-		Set<Cluster> allSharedClustersWithCharged = new HashSet<Cluster>();
-		Set<Cluster> allSharedClustersWithNeutral = new HashSet<Cluster>();
-		for(Shower sh : showerGroup){
-		    if(sh == shower) continue;
-		    if(!m_showerContainer.contains(sh)) continue;
-		    for(Cluster cl : sh.getShowerComponents()){
-			if(shower.contains(cl)){
-			    if(sh.isNeutral()){
-				allSharedClustersWithNeutral.add(cl);
-			    }else{
-				allSharedClustersWithCharged.add(cl);
-			    }
-			}
-		    }
-		}
-		energySharedWithCharged = PFAUtil.energy(allSharedClustersWithCharged, allSharedClusters, shower.getEnergyCalculator());
-		energySharedWithNeutral = PFAUtil.energy(allSharedClustersWithNeutral, allSharedClusters, shower.getEnergyCalculator());
-	    }
-
-	    // apply overrides for large showers:
-	    // - check if shower overlaps with another charged shower where if combined, the two showers have good E/P
-	    // - else if shower overlaps with a neutral shower: should do something here.
-	    // - else should do something else here
-	    if(residual > 1.5){
-
[truncated at 1000 lines; 1146 more skipped]

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
NonTrivialPFA.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- NonTrivialPFA.java	23 Oct 2011 09:50:31 -0000	1.2
+++ NonTrivialPFA.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -11,7 +11,7 @@
 import org.lcsim.digisim.DigiSimDriver;
 import org.lcsim.digisim.SimCalorimeterHitsDriver;
 import org.lcsim.recon.pfa.cheat.PerfectIdentifier;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.pfa.output.FlushReconstructedParticlesDriver;
 import org.lcsim.recon.cluster.mst.*;
 import org.lcsim.recon.cluster.mipfinder.*;
@@ -29,7 +29,7 @@
  * a List<ReconstructedParticle>, written to the event as
  * PFAReconstructedParticles.
  *
- * @version $Id: NonTrivialPFA.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+ * @version $Id: NonTrivialPFA.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
  * @author Mat Charles <[log in to unmask]>
  */
 
@@ -341,8 +341,8 @@
 	    {
 		LocalHelixExtrapolationTrackClusterMatcher extrapolate = new LocalHelixExtrapolationTrackClusterMatcher(new LocalHelixExtrapolator());
 		extrapolate.setCutSeparation(14.0); // about two cells
-		org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher simpleExtrapolate = new org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher(14.0);
-		org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher simpleCheatExtrapolate = new org.lcsim.recon.pfa.identifier.CheatHelixTrackClusterMatcher(14.0);
+		org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackClusterMatcher simpleExtrapolate = new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackClusterMatcher(14.0);
+		org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackClusterMatcher simpleCheatExtrapolate = new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.CheatHelixTrackClusterMatcher(14.0);
 		CheatTrackClusterMatcher cheater = new CheatTrackClusterMatcher(mcListName);
 		CheckSkeletonsForMultipleTracks separate = new CheckSkeletonsForMultipleTracks(evalWrapper, trackList, eventSkeletonClusters, eventSplitSkeletonClusters, eventMips, eventClumps, extrapolate); // LOCAL HELIX EXTRAPOLATION BASED ON SimTrackerHits
 		//CheckSkeletonsForMultipleTracks separate = new CheckSkeletonsForMultipleTracks(evalWrapper, trackList, eventSkeletonClusters, eventSplitSkeletonClusters, eventMips, eventClumps, cheater); // CHEATING
@@ -846,8 +846,8 @@
 	MIPChargedParticleMaker hadIDmip = new MIPChargedParticleMaker();
 	LocalHelixExtrapolationTrackMIPClusterMatcher mipMatch = new LocalHelixExtrapolationTrackMIPClusterMatcher(new LocalHelixExtrapolator());
 	mipMatch.setDebug(debug);
-	//org.lcsim.recon.pfa.identifier.SimpleTrackMIPClusterMatcher mipMatch = new org.lcsim.recon.pfa.identifier.SimpleTrackMIPClusterMatcher();
-	//org.lcsim.recon.pfa.identifier.SimpleTrackMIPClusterMatcher mipMatch = new org.lcsim.recon.pfa.identifier.CheatHelixTrackMIPClusterMatcher();
+	//org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackMIPClusterMatcher mipMatch = new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackMIPClusterMatcher();
+	//org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackMIPClusterMatcher mipMatch = new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.CheatHelixTrackMIPClusterMatcher();
 	add(mipMatch);
 	hadIDmip.setTrackMatcher(mipMatch);
 	hadIDmip.setInputTrackList(trackList);
@@ -867,8 +867,8 @@
 	// Then try the clusters generically:
 	SimpleChargedParticleMaker hadID = new SimpleChargedParticleMaker();
 	LocalHelixExtrapolationTrackClusterMatcher clusMatch = new LocalHelixExtrapolationTrackClusterMatcher(new LocalHelixExtrapolator());
-	//org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher clusMatch = new org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher();
-	//org.lcsim.recon.pfa.identifier.SimpleTrackClusterMatcher clusMatch = new org.lcsim.recon.pfa.identifier.CheatHelixTrackClusterMatcher();
+	//org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackClusterMatcher clusMatch = new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackClusterMatcher();
+	//org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.SimpleTrackClusterMatcher clusMatch = new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.CheatHelixTrackClusterMatcher();
 	add(clusMatch);
 	hadID.setTrackMatcher(clusMatch);
 	hadID.setInputTrackList(prefix+"tracksMinusMipAssociations");
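
Because the wildcard import now pulls in the contrib identifier package, any reference to a same-named class in the original org.lcsim.recon.pfa.identifier package has to be spelled out fully, as the rewritten lines above do. Those lines also rely on the cheat matcher being assignable to the simple matcher type. A toy, self-contained illustration of that declare-as-base, construct-as-subtype pattern (nested classes stand in for the two packages):

    public class QualifiedMatcherSketch {
        // Stand-ins for two packages that both define a matcher type.
        static class BasePkg { static class Matcher { final double cut; Matcher(double c) { cut = c; } } }
        static class CheatPkg { static class Matcher extends BasePkg.Matcher { Matcher(double c) { super(c); } } }
        public static void main(String[] args) {
            // As in the diff: declared as one type, constructed as the cheating subtype.
            BasePkg.Matcher simpleCheatExtrapolate = new CheatPkg.Matcher(14.0); // ~two cells
            System.out.println(simpleCheatExtrapolate.cut);
        }
    }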

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PFABookKeepingBroker.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- PFABookKeepingBroker.java	23 Oct 2011 09:50:31 -0000	1.2
+++ PFABookKeepingBroker.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -5,7 +5,7 @@
 import org.lcsim.event.*;
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.util.hitmap.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PFAParticleMaker.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- PFAParticleMaker.java	23 Oct 2011 09:50:31 -0000	1.1
+++ PFAParticleMaker.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -4,9 +4,9 @@
 import hep.physics.vec.*;
 import hep.physics.particle.properties.*;
 import org.lcsim.event.*;
-import org.lcsim.recon.cluster.util.*;
-import org.lcsim.recon.pfa.identifier.*;
 import org.lcsim.event.base.*;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.util.decision.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PFAUtil.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- PFAUtil.java	23 Oct 2011 09:50:31 -0000	1.2
+++ PFAUtil.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -7,7 +7,7 @@
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.geometry.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
 import org.lcsim.event.base.*;
@@ -492,6 +492,7 @@
     }
 
     static public double energy(Cluster mainCluster, List<SharedClusterGroup> listOfShares, ClusterEnergyCalculator calib) {
+	if(listOfShares == null) return energy(mainCluster, calib);
 	List<FuzzyCalorimeterHit> allFuzzyHits = new Vector<FuzzyCalorimeterHit>();
 	for (SharedClusterGroup share : listOfShares) {
 	    List<FuzzyCalorimeterHit> fuzzyHitsOfThisShare = findFuzzyHitsForCluster(mainCluster, share);
@@ -653,6 +654,7 @@
      * Checks for duplicate hits in a collection of clusters
      */
     static public void checkForDuplicateHitsInClusters(Collection<Cluster> clusters) {
+
 	Set<CalorimeterHit> allUsedHits = new HashSet<CalorimeterHit>();
 	List<CalorimeterHit> allUsedHitsList = new Vector<CalorimeterHit>();
 	for (Cluster clus : clusters) {
@@ -660,7 +662,7 @@
 		allUsedHits.add(hit);
 		allUsedHitsList.add(hit);
 		if (allUsedHits.size() != allUsedHitsList.size()) {
-		    throw new AssertionError("ERROR: Duplicate hit with ID "+hit.getCellID());
+		    return; // throw new AssertionError("ERROR: Duplicate hit with ID "+hit.getCellID());
 		}
 	    }
 	}
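
The two PFAUtil changes above soften failure modes: the share-aware energy sum now falls back to the plain per-cluster sum when no share list is supplied, and the duplicate-hit check returns quietly instead of throwing. A toy sketch of the null-guard overload pattern (simplified types; the real methods operate on Cluster, SharedClusterGroup and a ClusterEnergyCalculator):

    import java.util.*;

    public class NullGuardEnergySketch {
        // Share-aware sum, guarded exactly like the new first line of PFAUtil.energy:
        // a null share list means "no sharing", not an error.
        static double energy(List<Double> hits, List<Double> shares) {
            if (shares == null) return energy(hits);
            double e = energy(hits);
            for (double s : shares) e -= s; // toy model of removing shared energy
            return e;
        }
        static double energy(List<Double> hits) {
            double e = 0.0;
            for (double h : hits) e += h;
            return e;
        }
        public static void main(String[] args) {
            List<Double> hits = Arrays.asList(1.0, 2.0, 3.0);
            System.out.println(energy(hits, null)); // 6.0 via the fallback overload
        }
    }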

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PhotonVetoDecision.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- PhotonVetoDecision.java	23 Oct 2011 09:50:31 -0000	1.1
+++ PhotonVetoDecision.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -5,7 +5,7 @@
 import org.lcsim.event.*;
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.util.hitmap.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PhotonVetoDriver.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- PhotonVetoDriver.java	23 Oct 2011 09:50:31 -0000	1.1
+++ PhotonVetoDriver.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -6,7 +6,7 @@
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.util.hitmap.*;
 import org.lcsim.util.lcio.LCIOConstants;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
PreShowerMIPReassignmentAlgorithm.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- PreShowerMIPReassignmentAlgorithm.java	27 May 2011 12:01:12 -0000	1.1
+++ PreShowerMIPReassignmentAlgorithm.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -6,7 +6,7 @@
 import org.lcsim.event.*;
 import hep.physics.vec.*;
 import org.lcsim.geometry.Subdetector;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.cluster.util.BasicCluster;
 import org.lcsim.recon.util.CalorimeterInformation;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
ReclusterDTreeDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ReclusterDTreeDriver.java	23 Oct 2011 09:50:31 -0000	1.2
+++ ReclusterDTreeDriver.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -7,7 +7,7 @@
 import org.lcsim.util.hitmap.*;
 import org.lcsim.event.*;
 import org.lcsim.recon.cluster.util.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.event.base.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.*;
 import org.lcsim.util.decision.*;
@@ -27,7 +27,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -68,6 +68,7 @@
     protected DebugShowersDriver m_debugShowersCorrectionInEcal = null;
     protected DebugShowersDriver m_debugShowersNeutralFirstIteration = null;
     protected DebugShowersDriver m_debugShowersSecondaryNeutral = null;
+    protected DebugShowersDriver m_debugShowersThirdIteration = null;
     protected DebugShowersDriver m_debugShowersNeutralChargedOverlaps = null;
 
     public void setDebugShowers(boolean doDebug){
@@ -79,6 +80,7 @@
 	    add(m_debugShowersCorrectionInEcal);
 	    add(m_debugShowersNeutralFirstIteration);
 	    add(m_debugShowersSecondaryNeutral);
+	    add(m_debugShowersThirdIteration);
 	    add(m_debugShowersNeutralChargedOverlaps);
         }
     }
@@ -188,6 +190,8 @@
 	m_debugShowersNeutralFirstIteration.setOutputFileName( "ShowersAfterNeutralFirstIteration.aida" );
 	m_debugShowersSecondaryNeutral = new DebugShowersDriver( m_bookKeeper, m_extrapolator , mcList );
 	m_debugShowersSecondaryNeutral.setOutputFileName( "ShowersAfterSecondaryNeutralsLikelihood.aida" );
+	m_debugShowersThirdIteration = new DebugShowersDriver( m_bookKeeper, m_extrapolator , mcList );
+	m_debugShowersThirdIteration.setOutputFileName( "ShowersAfterThirdIteration.aida" );
 	m_debugShowersNeutralChargedOverlaps = new DebugShowersDriver( m_bookKeeper, m_extrapolator , mcList );
 	m_debugShowersNeutralChargedOverlaps.setOutputFileName( "ShowersAfterNeutralChargedOverlapRemoval.aida" );
 
@@ -259,6 +263,7 @@
 	m_properties.declareCut("firstConeAlgorithm1" , 0.95 );
 	m_properties.declareCut("firstConeAlgorithm2" , 0.9 );
 	m_properties.declareKey("MCListName", m_mcList);
+	m_properties.declareCut("numberOfShowerBuildingIterations", 3);
 	m_properties.declareKey("EcalDigiHitMapName", "EcalDigiHitMap");
 	m_properties.declareKey("HcalDigiHitMapName", "HcalDigiHitMap");
         m_properties.declareKey("MuonTrackClusterMapName", "MuonTrackClusterMap");
@@ -524,26 +529,32 @@
 // 	    }
 //             ShowerContainer scNeutralFirstIteration = m_bookKeeper.getShowerContainer( "showers after flagging primary neutrals" );
 //             m_debugShowersNeutralFirstIteration.doAnalysis( scNeutralFirstIteration , linkableClusters , allSharedClusters );
-//             ShowerContainer scSecondaryNeutral = m_bookKeeper.getShowerContainer( "charged showers after secondary neutrals likelihood" );
-//             m_debugShowersSecondaryNeutral.doAnalysis( scSecondaryNeutral, linkableClusters , allSharedClusters );
+             ShowerContainer scSecondaryNeutral = m_bookKeeper.getShowerContainer( "charged showers after secondary neutrals likelihood" );
+             m_debugShowersSecondaryNeutral.doAnalysis( scSecondaryNeutral, linkableClusters , allSharedClusters );
+             ShowerContainer scThirdIteration = m_bookKeeper.getShowerContainer( "showers after third iterations" );
+             m_debugShowersThirdIteration.doAnalysis( scThirdIteration, linkableClusters , allSharedClusters );
 //             ShowerContainer scNeutralChargedOverlaps = m_bookKeeper.getShowerContainer( "charged showers after resolving charged/neutral overlaps" );
 //             m_debugShowersNeutralChargedOverlaps.doAnalysis( scNeutralChargedOverlaps , linkableClusters , allSharedClusters );
 
 	}
-
-// 	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/180, "regionsAfterFirstIteration_1degree");
-// 	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/180, "regionsAfterSecondIteration_1degree");
-// 	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/90, "regionsAfterFirstIteration_2degree");
-// 	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/90, "regionsAfterSecondIteration_2degree");
-// 	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/60, "regionsAfterFirstIteration_3degree");
-// 	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/60, "regionsAfterSecondIteration_3degree");
-// 	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/45, "regionsAfterFirstIteration_4degree");
-// 	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/45, "regionsAfterSecondIteration_4degree");
-// 	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/36, "regionsAfterFirstIteration_5degree");
-// 	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/36, "regionsAfterSecondIteration_5degree");
-// 	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/18, "regionsAfterFirstIteration_10degree");
-// 	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/18, "regionsAfterSecondIteration_10degree");
+	/*
+	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/180, "regionsAfterFirstIteration_1degree");
+	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/180, "regionsAfterSecondIteration_1degree");
+	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/90, "regionsAfterFirstIteration_2degree");
+	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/90, "regionsAfterSecondIteration_2degree");
+	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/60, "regionsAfterFirstIteration_3degree");
+	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/60, "regionsAfterSecondIteration_3degree");
+	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/45, "regionsAfterFirstIteration_4degree");
+	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/45, "regionsAfterSecondIteration_4degree");
+	// 	m_debugRegions.doAnalysis("charged showers after tiny neutrals", Math.PI/45, "regionsAfterTinyNeutrals_4degree");
+	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/36, "regionsAfterFirstIteration_5degree");
+	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/36, "regionsAfterSecondIteration_5degree");
+// 	m_debugRegions.doAnalysis("charged showers after tiny neutrals", Math.PI/36, "regionsAfterTinyNeutrals_5degree");
+// 	m_debugRegions.doAnalysis("charged showers after applying overrides", Math.PI/36, "regionsAfterApplyingOverrides_5degree");
+	m_debugRegions.doAnalysis("showers after neutral showers first iterations", Math.PI/18, "regionsAfterFirstIteration_10degree");
+	m_debugRegions.doAnalysis("charged showers after secondary neutrals likelihood", Math.PI/18, "regionsAfterSecondIteration_10degree");
 	//	m_debugNeutrals.doAnalysis("All Showers", "allShowers");
+	*/
 
 	// Outputs
 	m_particleMaker.makeParticles();
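
The new numberOfShowerBuildingIterations cut (declared above with default 3) makes the number of shower-building passes steerable, alongside the new third-iteration debug driver. A stand-in sketch of the declare/override/read cycle (the toy Props class only mimics the PropertyContainer calls visible in this diff; the real getter may differ):

    import java.util.*;

    public class IterationCutSketch {
        // Minimal stand-in for PropertyContainer: declared defaults, optional overrides.
        static class Props {
            private final Map<String, Double> cuts = new HashMap<String, Double>();
            void declareCut(String name, double def) { if (!cuts.containsKey(name)) cuts.put(name, def); }
            void setCut(String name, double value) { cuts.put(name, value); }
            double cut(String name) { return cuts.get(name); }
        }
        public static void main(String[] args) {
            Props p = new Props();
            p.declareCut("numberOfShowerBuildingIterations", 3);
            int n = (int) p.cut("numberOfShowerBuildingIterations");
            for (int i = 1; i <= n; i++) {
                System.out.println("shower-building iteration " + i);
            }
        }
    }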

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
ReclusterDriver.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ReclusterDriver.java	23 Oct 2011 09:50:31 -0000	1.2
+++ ReclusterDriver.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -15,7 +15,7 @@
 import org.lcsim.recon.cluster.mipfinder.*;
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.clumpfinder.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.mc.fast.tracking.ReconTrack;
 import org.lcsim.event.base.*;
 import hep.physics.particle.Particle;
@@ -42,7 +42,7 @@
  * This version is superseded by ReclusterDTreeDriver,
   * which derives from it.
   *
-  * @version $Id: ReclusterDriver.java,v 1.2 2011/10/23 09:50:31 zaidan Exp $
+  * @version $Id: ReclusterDriver.java,v 1.3 2012/04/11 15:49:36 zaidan Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -125,7 +125,7 @@
 	m_inputClumps = clumps;
 	m_inputSplitSkeletonClusters = splitSkeletonClusters;
 
-	initTrackMatch(new org.lcsim.recon.pfa.identifier.TrackHelixPlusHitExtrapolator());
+	initTrackMatch(new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackHelixPlusHitExtrapolator());
 	initCalibration();
 	if (m_debug) { initPlots(); }
 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
RunAndWriteOutPFAFullTracking.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- RunAndWriteOutPFAFullTracking.java	23 Oct 2011 09:50:31 -0000	1.2
+++ RunAndWriteOutPFAFullTracking.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -65,18 +65,18 @@
 	//	add(new DebugSubClustersDriver("MCParticle"));
 
 	// resolution + some histograms
-	add(new MassPlots("FlushedDTreeReclusteredParticles", "output-write-reclustered-dtree.aida")); // Make some histograms and write them to a file
+	//	add(new MassPlots("FlushedDTreeReclusteredParticles", "output-write-reclustered-dtree.aida")); // Make some histograms and write them to a file
 
 	m_eventSelection = new EventSelection(0, 0.95);
 	m_eventCount = 0;
     }
 
-    protected String m_linkLikelihoodPath = "structuralPFA/linkLikelihood.contrib.bin";
+    protected String m_linkLikelihoodPath = "structuralPFA/linkLikelihood.bin";
     public void setLinkLikelihoodPath(String path) { m_linkLikelihoodPath = path; }
-    protected String m_primaryShowerLikelihoodPath = "structuralPFA/likelihood.PrimaryShower.contrib.bin";
-    public void setPrimaryShowerLikelihoodPath(String path) { m_primaryShowerLikelihoodPath = path; }
-    protected String m_showerToShowerLikelihoodPath = "structuralPFA/likelihood.ShowerToShower.contrib.bin";
+    protected String m_showerToShowerLikelihoodPath = "structuralPFA/likelihood.ShowerToShower.bin";
     public void setShowerToShowerLikelihoodPath(String path) { m_showerToShowerLikelihoodPath = path; }
+    protected String m_primaryShowerLikelihoodPath = "structuralPFA/likelihood.PrimaryShower.bin";
+    public void setPrimaryShowerLikelihoodPath(String path) { m_primaryShowerLikelihoodPath = path; }
     protected int m_runMode = 0; // 0: analysis - 1: link training - 2: primary shower training - 3: shower to shower training
     public void setRunMode(int runMode){ m_runMode = runMode; }
 
@@ -93,9 +93,25 @@
 	}
 	
 	System.out.println("Start of event: "+m_eventCount);
-	//	if(m_eventSelection.pass(event)){
-	super.process(event);
-	//}
+	if(m_eventSelection.pass(event)){
+
+	// 	List<Driver> subDrivers = super.drivers();
+	// 	for (Driver driver : subDrivers){
+	// 	    if(driver instanceof MassPlots){
+	// 		Boolean isEventOK = true;//(Boolean)event.get("isEventOK_regionsAfterSecondIteration_5degree");
+	// 		if(isEventOK){
+	// 		    driver.doProcess(event);
+	// 		}
+	// 	    }else{
+	// 		driver.doProcess(event);
+	// 	    }
+	// 	}
+
+      	super.process(event);
+
+
+
+	}
 	System.out.println("End of event: "+m_eventCount);
 	m_eventCount++;
     }
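
With this change, process() forwards events to the chain only when the EventSelection accepts them; previously the gate was commented out and every event was processed. A toy sketch of the gating pattern (the selection criterion here is invented; the real EventSelection is constructed with (0, 0.95)):

    public class EventGateSketch {
        interface Selection { boolean pass(int eventNumber); }
        private int eventCount = 0;
        private final Selection selection;
        EventGateSketch(Selection s) { selection = s; }
        void process(int eventNumber) {
            System.out.println("Start of event: " + eventCount);
            if (selection.pass(eventNumber)) {
                // super.process(event) runs only inside this guard in the committed code
                System.out.println("  processing event " + eventNumber);
            }
            System.out.println("End of event: " + eventCount);
            eventCount++;
        }
        public static void main(String[] args) {
            EventGateSketch d = new EventGateSketch(new Selection() {
                public boolean pass(int e) { return e % 2 == 0; } // invented criterion
            });
            for (int e = 0; e < 4; e++) d.process(e);
        }
    }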

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
SetUpDTreeForReclustering.java 1.3 -> 1.4
diff -u -r1.3 -r1.4
--- SetUpDTreeForReclustering.java	23 Oct 2011 10:07:47 -0000	1.3
+++ SetUpDTreeForReclustering.java	11 Apr 2012 15:49:36 -0000	1.4
@@ -14,17 +14,17 @@
 import org.lcsim.recon.cluster.mipfinder.trackxtrap.*;
 import org.lcsim.recon.cluster.muonfinder.MuonFinderWrapper;
 import org.lcsim.recon.cluster.muonfinder.MuonFinderWrapper3;
-//ximport org.lcsim.recon.cluster.myclusterer.*;
+//import org.lcsim.recon.cluster.myclusterer.*;
 import org.lcsim.recon.cluster.util.HitNearBarrelEndcapBoundaryDecision;
 import org.lcsim.recon.cluster.util.RemoveHitsFromClusters;
 import org.lcsim.recon.cluster.util.VetoHitsFromClusters;
-import org.lcsim.recon.pfa.identifier.AmbiguousTrackToClusterMapMaker;
-import org.lcsim.recon.pfa.identifier.HelixExtrapolator;
-import org.lcsim.recon.pfa.identifier.TrackToElectronMapMaker;
-import org.lcsim.recon.pfa.identifier.TrackToGenericClusterMapMaker;
-import org.lcsim.recon.pfa.identifier.TrackToMipClusterMapMaker;
-import org.lcsim.recon.pfa.identifier.TrackToPreShowerMipMapMaker;
-//import org.lcsim.recon.pfa.identifier.TrackToClusterSpecialCasesMapMaker;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.AmbiguousTrackToClusterMapMaker;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.HelixExtrapolator;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackToElectronMapMaker;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackToGenericClusterMapMaker;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackToMipClusterMapMaker;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackToPreShowerMipMapMaker;
+//import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackToClusterSpecialCasesMapMaker;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.debug.DebugTrackToClusterSpecialCases;
 import org.lcsim.util.Driver;
 import org.lcsim.util.ListAddDriver;
@@ -80,7 +80,13 @@
 	    {
 		init = true;
 		CalorimeterInformation ci = CalorimeterInformation.instance();
-		
+
+		if(m_doBaseline) {
+		    System.out.println(">>>>>>>>>>>>>>>>> running baseline setup...");
+		}else{
+		    System.out.println(">>>>>>>>>>>>>>>>> running new setup...");
+		}
+
 		////// R.Z.: Filter out certain type of hits (e.g. neutrals) for testing...
 		{
 		    boolean filterNeutralHadronHits = false;   // should be FALSE for a real PFA !!!!!!!!
@@ -581,7 +587,6 @@
  						 findCluster));
 
 			// now try to fix special cases of track/seed matching
-			/*
 			DebugTrackToClusterSpecialCases specialMapMakerDebug = new DebugTrackToClusterSpecialCases(inputTrackList,
 														   "MCParticle",
 														   "EcalDigiHitMap",
@@ -609,8 +614,8 @@
 			specialMapMakerDebug.addTrackToClusterMap("MapMipClusterTracksToClusterSeeds");
 			specialMapMakerDebug.addTrackToClusterMap("MapGenClusterTracksToClusterSeeds");
 			specialMapMakerDebug.addTrackToClusterMap("MapAmbigClusterTracksToClusterSeeds");
-			add(specialMapMakerDebug);
-			*/
+			//add(specialMapMakerDebug);
+
 
 			// now try to fix special cases of track/seed matching
 			TrackToClusterSpecialCasesMapMaker specialMapMaker = new TrackToClusterSpecialCasesMapMaker(inputTrackList,
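
The change above re-enables construction and configuration of the DebugTrackToClusterSpecialCases driver but leaves add(specialMapMakerDebug) commented out, so the object is built and wired without ever entering the event loop. A toy sketch of why an un-added driver is inert (stand-in Driver framework, not the real org.lcsim.util.Driver):

    import java.util.*;

    public class UnaddedDriverSketch {
        static class Driver {
            private final List<Driver> children = new ArrayList<Driver>();
            protected void add(Driver d) { children.add(d); }
            protected void process(String event) {
                for (Driver d : children) d.process(event); // only added children run
            }
        }
        static class DebugDriver extends Driver {
            protected void process(String event) { System.out.println("debug: " + event); }
        }
        public static void main(String[] args) {
            Driver parent = new Driver();
            DebugDriver debug = new DebugDriver(); // constructed and configurable...
            // parent.add(debug);                  // ...but inert while this stays commented out
            parent.process("event 1");             // prints nothing
        }
    }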

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
SetUpPFA.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- SetUpPFA.java	23 Oct 2011 09:50:31 -0000	1.2
+++ SetUpPFA.java	11 Apr 2012 15:49:36 -0000	1.3
@@ -5,7 +5,7 @@
 import org.lcsim.event.*;
 import org.lcsim.event.util.*;
 import org.lcsim.util.decision.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.recon.cluster.util.CalorimeterHitTimeCutDecision;
 import org.lcsim.recon.cluster.util.UpperSubLayerDecision;
 import org.lcsim.recon.util.CalorimeterInformation;
@@ -17,7 +17,7 @@
     UnphysicalTrackDecision dec = new UnphysicalTrackDecision();
     //default input realistic tracks
     ListFilterDriver fil = new ListFilterDriver(dec,"Tracks", "FilteredTrackList", Track.class);
-    HelixExtrapolator findCluster = new org.lcsim.recon.pfa.identifier.TrackHelixPlusHitExtrapolator();
+    HelixExtrapolator findCluster = new org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.TrackHelixPlusHitExtrapolator();
     ReclusterDTreeDriver reclusTree = null;
 
     public void setFilterInputTrack(String x){ fil.setInputList(x); }
@@ -37,7 +37,7 @@
     private boolean useNewInitialMipFinding = false;
     public void setUseNewInitialMipFinding(boolean x){useNewInitialMipFinding = x;}
 
-    protected String m_linkLikelihoodPath = "structuralPFA/linkLikelihood.bin";
+    protected String m_linkLikelihoodPath = "structuralPFA/likelihood.bin";
     public void setLinkLikelihoodPath(String path) { m_linkLikelihoodPath = path; }
     protected String m_showerToShowerLikelihoodPath = "structuralPFA/showerToShowerLikelihood.bin";
     public void setShowerToShowerLikelihoodPath(String path) { m_showerToShowerLikelihoodPath = path; }
@@ -45,6 +45,15 @@
     public void setPrimaryShowerLikelihoodPath(String path) { m_primaryShowerLikelihoodPath = path; }
     protected int m_runMode = 0; // 0: analysis - 1: link training - 2: primary shower training - 3: shower to shower training
     public void setRunMode(int runMode){ m_runMode = runMode; }
+    protected boolean m_doBaselinePFA = false;
+    public void doBaseline(boolean baseline) { m_doBaselinePFA = baseline; }
+    protected int m_nIter = 3;
+    public void numberOfIterations(int n) { m_nIter = n; }
+
+    protected String trackList;
+    protected String mcList;
+    public void setTrackList(String tracks) { trackList = tracks; }
+    public void setMcList(String mc) { mcList = mc; }
 
     public SetUpPFA(){
         this("Tracks");
@@ -52,9 +61,6 @@
     public SetUpPFA(String trackList) {
 	this(trackList, "ReconFSParticles");
     }
-
-    String trackList;
-    String mcList;
     public SetUpPFA(String _trackList, String _mcList){
 	trackList = _trackList;
 	mcList = _mcList;
@@ -67,6 +73,12 @@
         {
             init = true;
 
+	    if(m_doBaselinePFA) {
+		System.out.println(">>>>>>>>>>>>>>>>> running baseline PFA");
+	    }else{
+		System.out.println(">>>>>>>>>>>>>>>>> running new PFA");
+	    }
+
 	    // Filter tracks, removing those with unphysical energy
 	    {
 
@@ -113,7 +125,7 @@
                 allHitLists.add("CorrMuonBarrelDigiHits");
                 mstHitLists.add(ci.getDigiCollectionName(CalorimeterType.MUON_ENDCAP));
                 SetUpDTreeForReclustering setup = new SetUpDTreeForReclustering("FilteredTrackList", allHitLists, recoHitLists, mstHitLists, findCluster);
-		//setup.doBaseline(true);
+		setup.doBaseline(m_doBaselinePFA || m_runMode == 1);
 		//setup.setSafeMode(true);
 		//setup.cheatOnPhotons(true);
 		//setup.doCheatClustering(true);
@@ -138,25 +150,32 @@
             reclusTree.addInputLeftoverHits("LeftoverHitsInsideTreesECAL");
             reclusTree.addInputLeftoverHits("LeftoverHitsInsideTreesHCAL");
             reclusTree.addInputLeftoverHits("LeftoverHitsInsideTreesMCAL");
- 	    //reclusTree.addTrackToClusterMap("MapPreShowerMipTracksToClusterSeeds");
- 	    //reclusTree.addTrackToClusterMap("MapMipClusterTracksToClusterSeeds");
- 	    //reclusTree.addTrackToClusterMap("MapGenClusterTracksToClusterSeeds");
- 	    //reclusTree.addTrackToClusterMap("MapAmbigClusterTracksToClusterSeeds");
-	    reclusTree.addTrackToClusterMap("TracksMatchedToClusters");
+	    if(m_doBaselinePFA || m_runMode == 1) {
+		reclusTree.addTrackToClusterMap("MapPreShowerMipTracksToClusterSeeds");
+		reclusTree.addTrackToClusterMap("MapMipClusterTracksToClusterSeeds");
+		reclusTree.addTrackToClusterMap("MapGenClusterTracksToClusterSeeds");
+		reclusTree.addTrackToClusterMap("MapAmbigClusterTracksToClusterSeeds");
+	    } else {
+		reclusTree.addTrackToClusterMap("TracksMatchedToClusters");
+	    }
 	    //reclusTree.getProperties().setFlag("debug", true); // default false
 	    //reclusTree.getProperties().setFlag("safeMode", true); // default false
-	    //reclusTree.getProperties().setFlag("doBaselinePFA", true); // default false
+	    reclusTree.getProperties().setFlag("doBaselinePFA", true); // default false
 	    //reclusTree.getProperties().setFlag("doBaselineShowerBuilding", true); // default false
 	    //reclusTree.getProperties().setFlag("doFirstConeAlgorithm", true); // default false
+	    //reclusTree.getProperties().setCut("numberOfShowerBuildingIterations", (double)m_nIter); // default 3
 	    //reclusTree.getProperties().setFlag("doCheatScoring", true); // default false
 	    //reclusTree.getProperties().setFlag("doPerfectShowerBuilding", true); // default false
 	    //reclusTree.getProperties().setFlag("doPerfectSecondIteration", true); // default false
 	    //reclusTree.getProperties().setCut( "firstConeAlgorithm1" , 0.95 ); // default 0.95
 	    //reclusTree.getProperties().setCut( "firstConeAlgorithm2" , 0.9 ); // default 0.9
-	    reclusTree.getProperties().setCut("scoreCut", 0.9); // default 0.7
+	    // 	    if(!m_doBaselinePFA){
+	    // 		reclusTree.getProperties().setCut("scoreCut", 0.99); // default 0.7
+	    // 	    }
 	    //reclusTree.getProperties().setFlag("doDebugPhotons", true); // default false
 	    //reclusTree.getProperties().setFlag("doDebugParticles", true); // default false
-	    //	    reclusTree.setDebugLinks(true);
+	    //reclusTree.setDebugLinks(true);
+	    //reclusTree.setDebugShowers(true);
 	    reclusTree.getProperties().setKey("LikelihoodPath", m_linkLikelihoodPath);
 	    reclusTree.getProperties().setKey("showerLikelihoodPath", m_primaryShowerLikelihoodPath);
 	    reclusTree.getProperties().setKey("showerToShowerLikelihoodPath", m_showerToShowerLikelihoodPath);

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
TrackToClusterCosAngle.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- TrackToClusterCosAngle.java	23 Oct 2011 09:50:31 -0000	1.1
+++ TrackToClusterCosAngle.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -4,7 +4,7 @@
 import hep.physics.vec.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 public class TrackToClusterCosAngle extends TrackToClusterLikelihoodQuantity
 {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
TrackToClusterDistance.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- TrackToClusterDistance.java	23 Oct 2011 09:50:31 -0000	1.1
+++ TrackToClusterDistance.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -4,7 +4,7 @@
 import hep.physics.vec.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 public class TrackToClusterDistance extends TrackToClusterLikelihoodQuantity
 {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
TrackToClusterForce.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- TrackToClusterForce.java	23 Oct 2011 09:50:31 -0000	1.1
+++ TrackToClusterForce.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -4,7 +4,7 @@
 import hep.physics.vec.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 
 public class TrackToClusterForce extends TrackToClusterLikelihoodQuantity
 {

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
TrackToClusterLikelihoodQuantity.java 1.1 -> 1.2
diff -u -r1.1 -r1.2
--- TrackToClusterLikelihoodQuantity.java	23 Oct 2011 09:50:31 -0000	1.1
+++ TrackToClusterLikelihoodQuantity.java	11 Apr 2012 15:49:36 -0000	1.2
@@ -2,7 +2,7 @@
 
 import org.lcsim.event.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.*;
-import org.lcsim.recon.pfa.identifier.HelixExtrapolator;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.HelixExtrapolator;
 
 /**
  * Extends the StructuralLikelihoodQuantity and forces arguments of type Shower 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural
TrackToClusterSpecialCasesMapMaker.java 1.3 -> 1.4
diff -u -r1.3 -r1.4
--- TrackToClusterSpecialCasesMapMaker.java	23 Oct 2011 10:28:04 -0000	1.3
+++ TrackToClusterSpecialCasesMapMaker.java	11 Apr 2012 15:49:36 -0000	1.4
@@ -9,7 +9,7 @@
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.recon.cluster.mipfinder.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.geometry.Calorimeter.CalorimeterType;
 import org.lcsim.geometry.*;
 
@@ -36,14 +36,11 @@
 
     boolean m_init;
 
-    String m_inputTrackListNameThis;
-
     public TrackToClusterSpecialCasesMapMaker(String inputTrackList, String outputMap, String outputUnmatchedTrackList,
 					      HelixExtrapolator extrapolator) {
 
 	super(inputTrackList, outputMap, outputUnmatchedTrackList);
 
-	m_inputTrackListNameThis = inputTrackList;
 	m_trackToClusterMapNames = new Vector<String>();
 	m_mipListNames = new Vector<String>();
 	m_clumpListNames = new Vector<String>();
@@ -53,7 +50,7 @@
 
 	m_extrapolator = extrapolator;
 
-	m_eval = new LikelihoodEvaluatorWrapper("structuralPFA/linkLikelihood.contrib.bin");
+	m_eval = new LikelihoodEvaluatorWrapper("structuralPFA/likelihood.bin");
 
 	m_properties = new PropertyContainer();
 	m_properties.declareFlag("attachMipsFromDTree", false);
@@ -119,7 +116,7 @@
 	if(m_properties.getFlag("considerMipsInLinkableClusters")) linkableClustersForExtrapolation.addAll(mips);
 	linkableClustersForExtrapolation.addAll(clumps);
 	linkableClustersForExtrapolation.addAll(blocks);
-	trackList = event.get(Track.class, m_inputTrackListNameThis);
+	trackList = event.get(Track.class, m_inputTrackListName);
 
 	// input track-seed matching
 	trackToClusterMap = new HashMap<Track,Cluster>();
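
The deleted m_inputTrackListNameThis duplicated a field the base class already maintains, and the lookup now reads the inherited m_inputTrackListName instead. Duplicated fields like this silently go stale if the base class's copy is ever updated; a toy illustration:

    public class DuplicatedFieldSketch {
        static class Base {
            protected String inputTrackListName;
            Base(String name) { inputTrackListName = name; }
            void setInputList(String name) { inputTrackListName = name; }
        }
        static class Derived extends Base {
            private final String inputTrackListNameThis; // frozen copy, as in the removed field
            Derived(String name) { super(name); inputTrackListNameThis = name; }
            String staleRead() { return inputTrackListNameThis; }
        }
        public static void main(String[] args) {
            Derived d = new Derived("Tracks");
            d.setInputList("FilteredTrackList");
            // The copy misses the update; the inherited field stays current.
            System.out.println(d.staleRead() + " vs " + d.inputTrackListName);
        }
    }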

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural/sharing
ClusterSharingAlgorithmWrapper.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ClusterSharingAlgorithmWrapper.java	23 Oct 2011 09:50:32 -0000	1.2
+++ ClusterSharingAlgorithmWrapper.java	11 Apr 2012 15:49:37 -0000	1.3
@@ -107,8 +107,6 @@
 	    maxDistanceForProximityClustersMCAL = 99999.9; // effectively no cut-off
 	}
 
-	boolean excludePhotonsFromCone = true;
-
 	List<SharedClusterGroup> allSharedClusters = m_bookKeeper.getAllSharedClusters();
 	// Small clusters
 	{

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural/shower
ShowerBranch.java added at 1.1
diff -N ShowerBranch.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ShowerBranch.java	11 Apr 2012 15:49:37 -0000	1.1
@@ -0,0 +1,92 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower;
+
+import java.util.*;
+import hep.physics.vec.*;
+import org.lcsim.event.*;
+
+public class ShowerBranch
+{
+    protected List<Cluster> m_listClusters = new Vector<Cluster>();
+    protected Cluster m_lastAddedCluster;
+    protected Hep3Vector m_branchPoint = null;
+    protected Hep3Vector m_direction = null;
+    protected Hep3Vector m_position = null;
+    protected int m_id;
+    static protected int ms_instanceCount = 0;
+
+    protected ShowerWithBranches m_mother = null;
+
+    public ShowerBranch()
+    {
+	this(new BasicHep3Vector(0,0,0));
+    }
+
+    public ShowerBranch(Hep3Vector branchPoint)
+    {
+	m_branchPoint = branchPoint;
+	ms_instanceCount++;
+	m_id = ms_instanceCount;
+    }
+
+    public void addCluster( Cluster clus )
+    {
+	m_listClusters.add( clus );
+	m_lastAddedCluster = clus;
+
+	m_position = new BasicHep3Vector( clus.getPosition() );
+
+	Hep3Vector vSum = new BasicHep3Vector( 0. , 0. , 0. );
+	for( int i = 0 ; i < m_listClusters.size() ; i++ )
+	    {
+		Hep3Vector v = null;
+		if( i == 0 )
+		    {
+			v = VecOp.sub( new BasicHep3Vector( m_listClusters.get(i).getPosition() ) , m_branchPoint );
+			v = VecOp.unit( v );
+		    }
+		else
+		    {
+			v = VecOp.sub( new BasicHep3Vector( m_listClusters.get(i).getPosition() ) , new BasicHep3Vector( m_listClusters.get(i-1).getPosition() ) );
+			v = VecOp.unit( v );
+		    }
+		vSum = VecOp.add( vSum , v );
+	    }
+	m_direction = VecOp.mult( 1. / (double)m_listClusters.size() , vSum );
+    }
+
+    public Cluster getLastAddedCluster()
+    {
+	return m_lastAddedCluster;
+    }
+
+    public List<Cluster> getClusters()
+    {
+	return m_listClusters;
+    }
+
+    public int getSize()
+    {
+	return m_listClusters.size();
+    }
+
+    public double getEnergy()
+    {
+	double e = 0.;
+	for( Cluster clus : m_listClusters ) e += clus.getEnergy();
+	return e;
+    }
+
+    public Hep3Vector getDirection() { return m_direction; }
+
+    public Hep3Vector getPosition() { return m_position; }
+
+    public double getMomentum() { return m_mother.getMomentum(); }
+
+    public boolean isCharged() { return m_mother.isCharged(); }
+
+    public int getId() { return m_id; }
+
+    public ShowerWithBranches getMother(){ return m_mother; }
+    public void setMother(ShowerWithBranches mother){ m_mother = mother; }
+
+}
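
In the new ShowerBranch class, every addCluster call recomputes the branch direction as the arithmetic mean of unit step vectors: branch point to first cluster, then cluster i-1 to cluster i. A standalone numeric sketch of that update (plain arrays instead of Hep3Vector):

    public class BranchDirectionSketch {
        static double[] unit(double[] v) {
            double m = Math.sqrt(v[0]*v[0] + v[1]*v[1] + v[2]*v[2]);
            return new double[]{ v[0]/m, v[1]/m, v[2]/m };
        }
        static double[] meanDirection(double[] branchPoint, double[][] clusterPositions) {
            double[] sum = {0.0, 0.0, 0.0};
            double[] prev = branchPoint; // the first step starts at the branch point
            for (double[] pos : clusterPositions) {
                double[] step = unit(new double[]{ pos[0]-prev[0], pos[1]-prev[1], pos[2]-prev[2] });
                for (int k = 0; k < 3; k++) sum[k] += step[k];
                prev = pos;
            }
            int n = clusterPositions.length;
            // Mean of the unit vectors, as in VecOp.mult(1/size, vSum); not re-normalized.
            return new double[]{ sum[0]/n, sum[1]/n, sum[2]/n };
        }
        public static void main(String[] args) {
            double[] dir = meanDirection(new double[]{0, 0, 0},
                                         new double[][]{ {0, 0, 10}, {0, 1, 20}, {0, 2, 30} });
            System.out.printf("(%.3f, %.3f, %.3f)%n", dir[0], dir[1], dir[2]);
        }
    }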

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural/shower
ShowerWithBranches.java added at 1.1
diff -N ShowerWithBranches.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ ShowerWithBranches.java	11 Apr 2012 15:49:37 -0000	1.1
@@ -0,0 +1,112 @@
+package org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.shower;
+
+import java.util.*;
+import hep.physics.vec.*;
+import hep.physics.particle.properties.*;
+import org.lcsim.event.*;
+import org.lcsim.event.base.*;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
+
+public class ShowerWithBranches
+{
+    protected List<ShowerBranch> m_branches = null;
+    static protected int m_idCount = 0;
+    protected int m_id;
+    protected Set<Track> m_tracks;
+
+    List<SharedClusterGroup> m_sharedClusters = null;
+    ClusterEnergyCalculator m_calib;
+
+    public ShowerWithBranches(ClusterEnergyCalculator calib)
+    {
+	m_tracks = new HashSet<Track>();
+	m_calib = calib;
+	m_branches = new Vector<ShowerBranch>();
+	m_idCount ++;
+	m_id = m_idCount;
+    }
+
+    public List<SharedClusterGroup> getSharedClusters() { return m_sharedClusters; }
+    public void setSharedClusters(List<SharedClusterGroup> sharedClusters) { m_sharedClusters = sharedClusters; }
+
+    public Hep3Vector getVecMomentum() {
+	Hep3Vector sumVectMomentum = new BasicHep3Vector( 0. , 0. , 0. );
+	for(Track track : m_tracks) {
+	    sumVectMomentum = VecOp.add( sumVectMomentum , new BasicHep3Vector(track.getMomentum()) );
+	}
+	return sumVectMomentum;
+    }
+
+    public double getMomentum() { 
+	return getVecMomentum().magnitude();
+    }
+
+    public double getEnergyUncertainty() {
+	// prep: pion mass
+	int pdg_pi = 211;
+	ParticleType type_pi = ParticlePropertyManager.getParticlePropertyProvider().get(pdg_pi);
+	BaseParticleID pid_pi = new BaseParticleID(type_pi);
+	double mass_pi = type_pi.getMass();
+
+	double uncertainty = 0;
+	for(Track track : m_tracks) {
+	    double momentum = (new BasicHep3Vector(track.getMomentum())).magnitude();
+	    double energy = Math.sqrt(momentum*momentum + mass_pi*mass_pi);
+	    double sigma = 0.7 * Math.sqrt(energy);
+	    if ( energy < 1.0) {
+		sigma = 0.7;
+	    }
+	    uncertainty += sigma*sigma;
+	}
+
+	return Math.sqrt(uncertainty);
+    }
+
+    public double getEnergy() {
+	double e = 0;
+	for(ShowerBranch branch : m_branches) {
+	    e += branch.getEnergy();
+	}
+	return e;
+    }
+
+    public double getRealEnergy() {
+
+	List<Cluster> clusterList = new Vector<Cluster>();
+        for( ShowerBranch showerBranch : m_branches ) {
+            clusterList.addAll( showerBranch.getClusters() );
+	}
+
+	return PFAUtil.energy(clusterList, m_sharedClusters, m_calib);
+    }
+
+    public boolean isCharged( ) { return m_tracks.size() > 0; }
+
+    public Set<Track> getTracks() { return m_tracks; }
+    public void addTrack(Track track) { m_tracks.add(track); }
+
+    public List<ShowerBranch> getBranches(){ return m_branches; }
+    public void addBranch(ShowerBranch branch){
+	m_branches.add(branch);
+	branch.setMother(this);
+    }
+
+    public int getId() { return m_id; }
+
+    protected boolean m_isSpecial = false;
+    public boolean isSpecial(){ return m_isSpecial; }
+    public void setSpecial(boolean s){ m_isSpecial = s; }
+
+    public Cluster getCluster()
+    {
+        BasicCluster cluster = new BasicCluster();
+
+        for( ShowerBranch showerBranch : m_branches )
+            for( Cluster clus : showerBranch.getClusters() )
+		cluster.addCluster(clus);
+
+        return cluster;
+    }
+}
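
ShowerWithBranches.getEnergyUncertainty treats every attached track under a charged-pion mass hypothesis: per track it takes sigma = 0.7*sqrt(E), floored at 0.7 for E < 1 GeV, and combines the tracks in quadrature. A standalone numeric sketch of the same formula:

    public class TrackUncertaintySketch {
        static final double MASS_PI = 0.13957; // GeV, charged-pion hypothesis as in the class above
        static double sigmaPerTrack(double momentum) {
            double energy = Math.sqrt(momentum*momentum + MASS_PI*MASS_PI);
            double sigma = 0.7 * Math.sqrt(energy);
            if (energy < 1.0) sigma = 0.7; // resolution floor at low energy
            return sigma;
        }
        static double combined(double[] momenta) {
            double sum2 = 0.0;
            for (double p : momenta) {
                double s = sigmaPerTrack(p);
                sum2 += s * s; // quadrature sum over tracks
            }
            return Math.sqrt(sum2);
        }
        public static void main(String[] args) {
            System.out.printf("%.3f%n", combined(new double[]{ 10.0, 2.0 }));
        }
    }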

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural/shower
Shower.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- Shower.java	23 Oct 2011 09:50:32 -0000	1.2
+++ Shower.java	11 Apr 2012 15:49:37 -0000	1.3
@@ -6,7 +6,7 @@
 import org.lcsim.recon.cluster.util.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.sharing.*;
 import org.lcsim.event.base.*;
-import org.lcsim.recon.pfa.identifier.*;
+import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.identifier.*;
 import org.lcsim.util.swim.*;
 import org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.*;
 

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/recon/pfa/structural/shower
ShowerContainer.java 1.2 -> 1.3
diff -u -r1.2 -r1.3
--- ShowerContainer.java	23 Oct 2011 09:50:32 -0000	1.2
+++ ShowerContainer.java	11 Apr 2012 15:49:37 -0000	1.3
@@ -161,8 +161,10 @@
     }
 
     public Shower createShower(ClusterEnergyCalculator calib, List<SharedClusterGroup> listOfShares, Set<Track> tracks, Map<Track, Cluster> seedMap){
-	if(m_trackToShowerMap.get(tracks) != null){
-	    throw new AssertionError("Book keeping error: track already assigned to shower");
+	for(Track track : tracks){
+	    if(m_trackToShowerMap.get(track) != null){
+		//throw new AssertionError("Book keeping error: track already assigned to shower");
+	    }
 	}
 	Shower shower = ShowerFactory.createShower(calib, listOfShares, tracks, seedMap);
 	Set<Cluster> seeds = shower.getSeeds();
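
The replaced guard in createShower queried the Track-keyed map with the whole Set of tracks, which can never match, so the bookkeeping assertion was dead code; the new loop checks each track individually (with the throw currently commented out). A toy illustration of the original lookup mistake:

    import java.util.*;

    public class MapKeySketch {
        public static void main(String[] args) {
            Map<String, Integer> trackToShower = new HashMap<String, Integer>();
            trackToShower.put("track1", 7);

            Set<String> tracks = new HashSet<String>(Arrays.asList("track1", "track2"));
            // Old pattern: a Set used as the key of a Track-keyed map.
            // This compiles (Map.get takes Object) but is always null.
            System.out.println(trackToShower.get(tracks)); // null

            // New pattern: check each track individually.
            for (String t : tracks) {
                if (trackToShower.get(t) != null) {
                    System.out.println(t + " is already assigned to a shower");
                }
            }
        }
    }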

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
compile.sh removed after 1.1
diff -N compile.sh
--- compile.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,11 +0,0 @@
-#!/bin/bash -f
-
-codePath=~/pfa/lcsim # <-- change this path according to the local setup
-
-curdir=$PWD
-
-cd $codePath/lcsim-contrib
-
-mvn $1 -DskipTests=true && cd $codePath/lcsim-contrib/src/main/java/org/lcsim && tar -czf $HOME/.JAS3/extensions/snapshot.tar.gz contrib; cd -;
-
-cd $curdir

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
computeCorrelations.sh removed after 1.1
diff -N computeCorrelations.sh
--- computeCorrelations.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-./macro-run.sh org.lcsim.contrib.uiowa.uiowapfa.macros.ComputeCorrelations $@

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
computeEfficiencyVsRejection.sh removed after 1.1
diff -N computeEfficiencyVsRejection.sh
--- computeEfficiencyVsRejection.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-./macro-run.sh org.lcsim.contrib.uiowa.uiowapfa.macros.EfficiencyVsRejection $@

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
exportCLASSPATH.sh removed after 1.1
diff -N exportCLASSPATH.sh
--- exportCLASSPATH.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,9 +0,0 @@
-
-if [ ! -e .jarlist ] ; then
-    echo "ERROR: .jarlist not found: make sure you have run either the java-init.sh or the java-init-cache.sh in your working directory"
-    exit 1
-fi
-
-export CLASSPATH="."
-for f in $(cat .jarlist)  ; { export CLASSPATH="$CLASSPATH:$f"; }
-

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
importCode.csh removed after 1.1
diff -N importCode.csh
--- importCode.csh	27 May 2011 13:09:20 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,186 +0,0 @@
-#!/bin/csh -f
-
-#### This part actually constitutes hard-coded input to this script
-set packages = ""
-set packages = "${packages} recon.cluster.clumpfinder"
-set packages = "${packages} recon.cluster.clumpfinder.kmean"
-set packages = "${packages} recon.cluster.structural"
-set packages = "${packages} recon.cluster.structural.likelihood"
-set packages = "${packages} recon.pfa.structural"
-set packages = "${packages} recon.pfa.structural.sharing"
-set packages = "${packages} recon.pfa.structural.shower"
-
-set mainDir = "lcsim/src/org/lcsim"
-set contribDir = "lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa"
-set mainPrefix = "org.lcsim"
-set contribPrefix = "org.lcsim.contrib.uiowa.uiowapfa"
-############################################################
-
-
-
-alias printInfo 'if(${beVerbose} == true) echo'
-
-set label = "main"
-goto ${label}
-
-displayHelp:
-    if( ${copyMode} == "undefined" ) then
-	set src = "<path>/<relative path to source>"
-	set dst = "<path>/<relative path to destination>"
-	set srcPrefix = "<source root package>"
-	set dstPrefix = "<destination root package>"
-    endif
-    echo ""
-    echo "  This script will import uiowa PFA code from the location:"
-    echo "    ${src}"
-    echo "  to the location:"
-    echo "    ${dst}"
-    echo ""
-    if( ${beVerbose} == true ) then 
-	echo "  The following packages will be imported:"
-	foreach package (${packages})
-	    echo "    ${srcPrefix}.${package} into ${dstPrefix}.${package}"
-	end
-	echo ""
-	echo "  All *.java files in the following locations, if they exist, will be overwritten"
-	foreach package (${packages})
-	    set packageLocation = `echo ${dst}/${package} | sed "s,\.,/,g"`
-	    echo "    ${packageLocation}"
-	end
-	echo ""
-	echo "  Package declarations and import commands will be fixed in the imported *java files."
-	echo ""
-    endif
-    if( ${copyMode} == undefined ) then
-	echo "  where in import mode:"
-	echo "    <relative path to source> = ${mainDir}"
-	echo "    <relative path to destination> = ${contribDir}"
-	printInfo "    <source root package> = ${mainPrefix}"
-	printInfo "    <destination root package> = ${contribPrefix}"
-	echo "  and in export mode:"
-	echo "    <relative path to source> = ${contribDir}"
-	echo "    <relative path to destination> = ${mainDir}"
-	printInfo "    <source root package> = ${contribPrefix}"
-	printInfo "    <destination root package> = ${mainPrefix}"
-	echo ""
-    endif
-    goto ${label}
-
-displayUsage:
-    echo ""
-    echo "  Usage: ${script} [-i,--import|-e,--export] [-v,--verbose] [-h,--help] <path>"
-    echo "    Options are:"
-    echo "      -h, --help: print help message and exit"
-    echo "      -v, --verbose: execute in verbose mode"
-    echo "      -e, --export: export from contrib to main area"
-    echo "      -i, --import: import from main area to contrib"
-    echo ""
-    goto ${label}
-
-# Main
-main:
-
-set script = $0
-set localPath = "path"
-set copyMode = undefined
-set beVerbose = false
-
-foreach arg ($argv)
-    if( ${arg} == "--help" || ${arg} == "-h" ) then
-	set label = helping
-	goto displayUsage
-	helping:
-	    set copyMode = undefined
-	    set beVerbose = true
-	    set label = endOfScript
-	    goto displayHelp
-    else if( ${arg} == "--verbose" || ${arg} == "-v" ) then
-	set beVerbose = true
-    else if( ${arg} == "--export" || ${arg} == "-e" ) then
-	set copyMode = exporting
-    else if( ${arg} == "--import" || ${arg} == "-i" ) then
-	set copyMode = importing
-    else if( ${localPath} == "path" ) then
-	set localPath = ${arg}
-    else
-	echo "argument $arg was ignored: path = $localPath"
-    endif
-end
-
-if( ${copyMode} == exporting ) then
-    set src = "${localPath}/${contribDir}"
-    set dst = "${localPath}/${mainDir}"
-    set srcPrefix = ${contribPrefix}
-    set dstPrefix = ${mainPrefix}
-else if( ${copyMode} == importing ) then
-    set src = "${localPath}/${mainDir}"
-    set dst = "${localPath}/${contribDir}"
-    set srcPrefix = ${mainPrefix}
-    set dstPrefix = ${contribPrefix}
-else 
-    echo "You must run in either import or export mode"
-    set label = endOfScript
-    goto displayUsage
-endif
-
-if (${localPath} == "path") then
-    echo "Not enough arguments"
-    set label = endOfScript
-    goto displayUsage
-endif
-
-set label = promptForProceed
-goto displayHelp
-
-promptForProceed:
-
-echo "Do you wish to proceed with this operation? [y/N]"
-set resp = $<
-set resp = `echo ${resp} | tr '[A-Z]' '[a-z]'`
-if(${resp} != "y") then
-    echo "operation aborted by user..."
-    goto endOfScript
-endif
-
-if (! -d ${src}) then 
-    echo "the code was not found at the source:"
-    echo "  ${src}: no such directory"
-    goto endOfScript
-endif
-
-foreach package ($packages)
-    set srcPackage = ${srcPrefix}.${package}
-    set dstPackage = ${dstPrefix}.${package}
-    set packageSrcDir = `echo ${src}/${package} | sed "s,\.,/,g"`
-    set packageDstDir = `echo ${dst}/${package} | sed "s,\.,/,g"`
-    echo "Working on package: ${srcPackage}"
-    printInfo "  destination package: ${dstPackage}"
-    if(! -d ${packageSrcDir}) then 
-	echo "source code not found: ${packageSrcDir} no such directory"
-	goto endOfScript
-    endif
-    printInfo "  source code is at: ${packageSrcDir}"
-    printInfo "  destination code is at: ${packageDstDir}"
-    if(-d ${packageDstDir}) then
-	printInfo "  destination code already exist: removing *.java files from destination"
-	rm -f ${packageDstDir}/*.java
-    else
-	printInfo "  destination code does not exist: creating it"
-	mkdir -p ${packageDstDir}
-    endif
-    printInfo "  now copying *.java files from source to destination"
-    cp ${packageSrcDir}/*.java ${packageDstDir}
-    printInfo "  now fixing package declarations and import commands"
-    foreach p (${packages})
-	set srcp = ${srcPrefix}.${p}
-        set dstp = ${dstPrefix}.${p}
-	echo "    replacing ${srcp} by ${dstp}"
-	foreach file (${packageDstDir}/*.java)
-	    cat ${file} | sed "s,${srcp},${dstp},g" > tmp
-	    mv tmp ${file}
-	end
-    end
-end
-
-endOfScript:
-    exit

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
java-init-cache.sh removed after 1.1
diff -N java-init-cache.sh
--- java-init-cache.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,26 +0,0 @@
-#! /bin/sh
-
-# For a linux machine at SLAC:
-SOURCEDIR_JAS3=/afs/slac.stanford.edu/package/jas/release/jas3/jas3-0.8.3
-
-if [ ! -d "$SOURCEDIR_JAS3" ]; then
-    echo "WARNING: jas3 not found at $SOURCEDIR_JAS3"
-fi
-
-# For a unix machine in general:
-SOURCEDIR_HOME=$HOME/.JAS3
-
-if [ ! -d "$SOURCEDIR_HOME" ]; then
-    echo "WARNING: .JAS3 not found at $SOURCEDIR_HOME"
-fi
-
-rm -f .jarlist
-touch .jarlist
-find $SOURCEDIR_JAS3 -name "*.jar" -follow >> .jarlist
-find $SOURCEDIR_HOME -name "*.jar" -follow | grep -v "extensions" >> .jarlist
-
-rm -rf .workdir
-mkdir -p .workdir
-cp $HOME/.JAS3/extensions/*jar .workdir
-cp $HOME/.JAS3/extensions/snapshot.tar.gz .workdir
-find .workdir -name "*.jar" >> .jarlist

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
java-init.sh removed after 1.1
diff -N java-init.sh
--- java-init.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,21 +0,0 @@
-#! /bin/sh
-
-# For a linux machine at SLAC:
-SOURCEDIR_JAS3=/afs/slac.stanford.edu/package/jas/release/jas3/jas3-0.8.3
-
-if [ ! -d "$SOURCEDIR_JAS3" ]; then
-    echo "WARNING: jas3 not found at $SOURCEDIR_JAS3"
-fi
-
-# For a unix machine in general:
-SOURCEDIR_HOME=$HOME/.JAS3
-
-if [ ! -d "$SOURCEDIR_HOME" ]; then
-    echo "WARNING: .JAS3 not found at $SOURCEDIR_HOME"
-fi
-
-rm -f .jarlist
-touch .jarlist
-find $SOURCEDIR_JAS3 -name "*.jar" -follow >> .jarlist
-find $SOURCEDIR_HOME -name "*.jar" -follow >> .jarlist
-

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
java-run.sh removed after 1.1
diff -N java-run.sh
--- java-run.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,17 +0,0 @@
-#!/bin/bash
-
-echo Started...
-
-source exportCLASSPATH.sh
-
-SKIP=$1
-NUM=$2
-SOURCE=`cat $3`
-CLASSNAME=$4
-
-echo "SOURCE is '$SOURCE'"
-echo "CLASSPATH is $CLASSPATH"
-
-echo Start MainLoop...
-java -classpath $CLASSPATH -Xmx2048m org.lcsim.contrib.uiowa.MainLoop $SKIP $NUM $CLASSNAME $SOURCE
-echo Done

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
macro-run.sh removed after 1.1
diff -N macro-run.sh
--- macro-run.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-if [ $# -ne 1 ] ; then
-    echo "Usage: `basename $0` <className>"
-fi
-
-source exportCLASSPATH.sh
-
-java -classpath $CLASSPATH $1 $@

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
mergeAIDAFiles.sh removed after 1.1
diff -N mergeAIDAFiles.sh
--- mergeAIDAFiles.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-./macro-run.sh org.lcsim.contrib.uiowa.uiowapfa.macros.Merge $@

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
mergeLikelihoodFiles.sh removed after 1.1
diff -N mergeLikelihoodFiles.sh
--- mergeLikelihoodFiles.sh	27 May 2011 14:14:04 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-./macro-run.sh org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.MergeLikelihoodFiles $@
-
-

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
normalize.sh removed after 1.1
diff -N normalize.sh
--- normalize.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-./macro-run.sh org.lcsim.contrib.uiowa.uiowapfa.macros.Normalize $@

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
plotLikelihoods.sh removed after 1.1
diff -N plotLikelihoods.sh
--- plotLikelihoods.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-./macro-run.sh org.lcsim.contrib.uiowa.uiowapfa.recon.cluster.structural.likelihood.PlotLikelihoods $@

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
run-pfa.sh removed after 1.1
diff -N run-pfa.sh
--- run-pfa.sh	27 May 2011 12:01:14 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-if [ $# -ne 3 ]
-then
-  echo "Usage: `basename $0` <skip events> <max events> <input list>"
-  exit
-fi
-
-./java-run.sh $1 $2 $3 org.lcsim.contrib.uiowa.uiowapfa.recon.pfa.structural.RunAndWriteOutPFAFullTracking
-

lcsim-contrib/src/main/java/org/lcsim/contrib/uiowa/uiowapfa/scripts
submit.sh removed after 1.1
diff -N submit.sh
--- submit.sh	27 May 2011 13:08:48 -0000	1.1
+++ /dev/null	1 Jan 1970 00:00:00 -0000
@@ -1,106 +0,0 @@
-#!/bin/csh
-
-##-- This is input section
-##-- Default values are defined here
-##-- All values can be changed from command line
-@ eventsPerJob = 50
-@ eventsPerFile = 500
-set nfshome = /nfs/slac/g/uiowa/u01/zaidan/batch/joboutputs
-set workdir = $nfshome/testJob
-set queue = long # xlong # xxl # 
-set inputList = list
-###############################
-
-set scriptname = $0
-set label = main
-goto $label
-
-printHelp:
-    echo ""
-    echo "Usage $scriptname [options [option arguments]] [workdir]"
-    echo ""
-    echo "  [workdir] is the path where the job should run relative to the nfs home specified by the --nfs option"
-    echo "  Options are:"
-    echo "    -h,--help: prints this help message and exits"
-    echo "    -n,--eventsPerJob number: number of events per job [default: $eventsPerJob]"
-    echo "    -N,--eventsPerFile number: number of events per file [default: $eventsPerFile]"
-    echo "    --nfs path: absolute path to the nfs home [default: $nfshome]"
-    echo "    -q,--queue queueName: name of the batch queue where to send the job [default: $queue]"
-    echo "    -i,--input file: text file containing a list of input files [default: $inputList]"
-    echo ""
-    echo "  N.B.: It is important that the [workdir] argument is passed after setting the --nfs option"
-    echo ""
-    goto $label
-
-main:
-
-set workdirSet = false
-while ($#argv != 0)
-    set arg = $1
-    if( "$arg" == "--help" || "$arg" == "-h" ) then
-	set label = done
-	goto printHelp
-    endif
-    if( "$arg" == "--eventsPerJob" || "$arg" == "-n" ) then
-	shift
-	@ eventsPerJob = $1
-    else if( "$arg" == "--eventsPerFile" || "$arg" == "-N" ) then
-	shift
-	@ eventsPerFile = $1
-    else if( "$arg" == "--nfs" ) then
-	shift
-	if( $workdirSet == true) then
-	    echo "Warning: the --nfs option should be set before the [workdir] argument: option ignored"
-	    echo "         type $scriptname --help for more info"
-	else
-	    set nfshome = $1
-	endif
-    else if( "$arg" == "--queue" || "$arg" == "-q" ) then
-	shift
-	set queue = $1
-    else if( "$arg" == "--input" || "$arg" == "-i" ) then
-	shift
-	set inputList = $1
-    else
-	set workdir = $nfshome/$1
-	set workdirSet = true
-    endif
-    shift
-end
-
-set curdir = `pwd`
-
-rm -rf $workdir
-mkdir -p $workdir
-cp java-init-cache.sh $workdir
-cd $workdir
-./java-init-cache.sh
-cd $curdir
-
-@ i = 0
-@ jobsPerFile = $eventsPerFile / $eventsPerJob
-
-foreach f (`cat $inputList`)
-    @ j = 0
-    @ skip = 0
-    while($j < $jobsPerFile)
-	set jobdir = $workdir/$i
-	rm -rf $jobdir
-	mkdir -p $jobdir
-	cp exportCLASSPATH.sh $jobdir
-	cp java-run.sh $jobdir
-	cp run-pfa.sh $jobdir
-	ln -s $workdir/.jarlist $jobdir/.jarlist
-	ln -s $workdir/.workdir $jobdir/.workdir
-	echo $f > $jobdir/list
-	echo "cd $jobdir; ./run-pfa.sh $skip $eventsPerJob list | grep -v WARNING;" > $jobdir/run.sh
-	chmod a+x $jobdir/run.sh
-	bsub -q $queue -R linux -J $1_$i -o $jobdir/job.log $jobdir/run.sh
-	@ skip = $skip + $eventsPerJob
-	@ i = $i + 1
-	@ j = $j + 1
-    end
-end
-
-done:
-    exit
CVSspam 0.2.12

