Commit in lcsim/src/org/lcsim/contrib/uiowa/structural on MAIN
AssociateChargedTracks.java               +44   added  1.1
CheatFragmentIdentifier.java              +91   added  1.1
CheatFragmentMerger.java                 +114   added  1.1
CheckStatusOfClusterList.java             +99   added  1.1
CheckStatusOfHitList.java                 +43   added  1.1
EventEnergySum.java                      +565   added  1.1
FragmentIdentifier.java                   +20   added  1.1
FragmentMerger.java                      +132   added  1.1
FragmentRemover.java                      +51   added  1.1
HaloAssigner.java                        +117   added  1.1
MakeHelixSwimmersFromTruth.java           +85   added  1.1
MakeSeparatedClusters.java                +79   added  1.1
MapClusterToExtrapolationInfo.java        +12   added  1.1
MapClusterToListOfExtrapolationInfo.java  +12   added  1.1
Total: +1464 lines across 14 added files
mass update

lcsim/src/org/lcsim/contrib/uiowa/structural
AssociateChargedTracks.java added at 1.1
diff -N AssociateChargedTracks.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ AssociateChargedTracks.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,44 @@
+package structural; // package org.lcsim.recon.cluster.structural;
+
+import java.lang.String;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Vector;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Track;
+import org.lcsim.event.Cluster;
+
+/** 
+ * Driver that associates charged tracks to the clusters in the named input
+ * list and stores the result as a MapClusterToTrack under the given map name.
+ * (The association logic itself is not filled in yet.)
+ */
+
+public class AssociateChargedTracks extends Driver
+{
+    public AssociateChargedTracks(String inputClusterListName, String trackAssociationMapName)
+    {
+	m_inputClusterListName = inputClusterListName;
+	m_trackAssociationMapName = trackAssociationMapName;
+    }
+
+    public void process(EventHeader event) 
+    {
+	// Read in the clusters:
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+	// This will be the output map:
+	MapClusterToTrack outputTrackMap = new MapClusterToTrack();
+	// Which tracks are associated to which clusters?
+
+	// ...
+
+	// Write out:
+	List<MapClusterToTrack> outputTrackMapList = new Vector<MapClusterToTrack>();
+	outputTrackMapList.add(outputTrackMap);
+	event.put(m_trackAssociationMapName, outputTrackMapList);
+    }
+
+    protected String m_inputClusterListName;
+    protected String m_trackAssociationMapName;
+}
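
The output map is written as the single element of a List under m_trackAssociationMapName, so a downstream driver has to unwrap it again. A minimal read-back sketch, assuming MapClusterToTrack behaves as a Map<Cluster, Track> and using placeholder collection names (the unwrap mirrors what EventEnergySum does for its maps later in this commit):

    // Sketch only -- inside some downstream driver's process(EventHeader event).
    List<Object> mapObjects = event.get(Object.class, "TrackAssociationMap");      // placeholder name
    MapClusterToTrack trackMap = (MapClusterToTrack) mapObjects.iterator().next();
    for (Cluster clus : event.get(Cluster.class, "InputClusters")) {               // placeholder name
        Track trk = trackMap.get(clus);  // assumed Map-style lookup; null if no track was associated
    }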

lcsim/src/org/lcsim/contrib/uiowa/structural
CheatFragmentIdentifier.java added at 1.1
diff -N CheatFragmentIdentifier.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheatFragmentIdentifier.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,91 @@
+package structural;
+
+import java.util.List;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.SimCalorimeterHit;
+
+
+import util.HitCountAssociator;
+
+public class CheatFragmentIdentifier implements FragmentIdentifier
+{
+    public CheatFragmentIdentifier(String clusterListName)
+    {
+	m_clusterListName = clusterListName;
+    }
+
+    public boolean isFragment(Cluster clus, EventHeader event)
+    {
+	List<Cluster> clusterList = event.get(Cluster.class, m_clusterListName);
+	if (! clusterList.contains(clus)) { 
+	    throw new AssertionError("Cluster not in list");
+	}
+
+	//System.out.println("DEBUG: Cluster list of size "+clusterList.size()+" contains cluster ["+clus+"]");
+	//for (Cluster tmpClus : clusterList) {
+	//System.out.println("DEBUG: Cluster list entry: ["+tmpClus+"] with "+tmpClus.getCalorimeterHits().size()+" hits");
+	//}
+
+	HitCountAssociator assoc = new HitCountAssociator(event);
+	assoc.setClusters(clusterList);
+
+	// Which is the dominant MC particle?
+	MCParticle testAlphaParticle = assoc.associateClusterToMCParticles(clus).iterator().next();
+	MCParticle alphaParticle = null;
+	int alphaHits = 0;
+	for (MCParticle part : assoc.associateClusterToMCParticles(clus)) {
+	    int countHits = countHits(clus, part);
+	    if (countHits <= 0) { throw new AssertionError("boom"); }
+	    if (countHits > alphaHits || alphaParticle == null) {
+		alphaParticle = part;
+		alphaHits = countHits;
+	    }
+	}
+	if (testAlphaParticle != alphaParticle) { 
+	    System.out.println("BUG: Dumping associated MC particles in order:");
+	    for (MCParticle tmpPart : assoc.associateClusterToMCParticles(clus)) {
+		System.out.println("BUG:     ["+tmpPart+"]: "+tmpPart.getType().getName()+" with energy "+tmpPart.getEnergy()+" and relevant hits = "+countHits(clus, tmpPart));
+	    }
+	    throw new AssertionError("BUG! Compare alphaParticle ["+alphaParticle+"] which is "+alphaParticle.getType().getName()+" with energy "+alphaParticle.getEnergy()+" vs testAlphaParticle ["+testAlphaParticle+"] which is "+testAlphaParticle.getType().getName()+" with energy "+testAlphaParticle.getEnergy()+". alphaHits="+alphaHits+", but hits from testAlphaParticle are "+countHits(clus, testAlphaParticle)) ; 
+	}
+
+	// OK, now is there another cluster to which it contributes more hits?
+	for (Cluster otherClus : clusterList) {
+	    int hitCount = countHits(otherClus, alphaParticle);
+	    if (hitCount > alphaHits && otherClus != clus) {
+		//System.out.println("DEBUG: CheatFragmentIdentifier: cluster ["+clus+"] with "+alphaHits+" hits is a fragment (alpha is "+alphaParticle.getType().getName()+" with energy "+alphaParticle.getEnergy()+"; beaten by a cluster ["+otherClus+"] with "+hitCount+" hits)");
+		return true; // fragment
+	    } else if (hitCount <= alphaHits && otherClus != clus) {
+		if (hitCount>0) {
+		    //System.out.println("DEBUG: CheatFragmentIdentifier: found another cluster with same alpha particle, but fewer (or equal) hits ("+hitCount+" vs "+alphaHits+")");
+		}
+	    }
+	}
+
+	// ... no, there isn't.
+	//System.out.println("CheatFragmentIdentifier: cluster is not a fragment -- alpha particle is "+alphaParticle.getType().getName()+" with energy "+alphaParticle.getEnergy()+". No other cluster in a list of "+clusterList.size()+" matched better.");
+	return false; // not a fragment.
+    }
+
+    protected int countHits (Cluster clus, MCParticle part)
+    {
+	int count = 0;
+	for (CalorimeterHit hit : clus.getCalorimeterHits()) {
+	    SimCalorimeterHit simHit = (SimCalorimeterHit) hit;
+	    int nContributingParticles = simHit.getMCParticleCount();
+	    for (int i=0; i<nContributingParticles; i++) {
+		MCParticle hitPart = simHit.getMCParticle(i);
+		if (part == hitPart) {
+		    count++;
+		}
+	    }
+	}
+	return count;
+    }
+
+    protected String m_clusterListName;
+}
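
CheatFragmentIdentifier decides fragment-ness from truth: it finds the cluster's dominant ("alpha") MC particle via shared simulated hits and flags the cluster as a fragment if any other cluster in the named list contains more hits from that particle. A minimal sketch of calling it directly, with a placeholder list name (in practice it is handed to a FragmentMerger or FragmentRemover, defined further down):

    FragmentIdentifier cheatID = new CheatFragmentIdentifier("RefinedClusters");   // placeholder list name
    for (Cluster clus : event.get(Cluster.class, "RefinedClusters")) {
        if (cheatID.isFragment(clus, event)) {
            // the dominant MC particle of 'clus' contributes more hits to some other cluster
        }
    }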

lcsim/src/org/lcsim/contrib/uiowa/structural
CheatFragmentMerger.java added at 1.1
diff -N CheatFragmentMerger.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheatFragmentMerger.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,114 @@
+package structural;
+
+import java.util.List;
+import java.util.Vector;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.recon.cluster.util.BasicCluster;
+import org.lcsim.event.MCParticle;
+
+import util.EnergyAssociator;
+import util.HitCountAssociator;
+
+public class CheatFragmentMerger extends FragmentMerger
+{
+    public CheatFragmentMerger(String inputClusterListName, String outputClusterListName, FragmentIdentifier fragmentID)
+    {
+	super(inputClusterListName, outputClusterListName, fragmentID);
+    }
+
+    
+    protected BasicCluster findBestMerge(Cluster fragment, List<Cluster> nonFragments, List<Cluster> fragments)
+    {
+	// Match to a non-fragment
+	ClusterEnergyAssociator assoc = new ClusterEnergyAssociator();
+	for (Cluster nonFragment : nonFragments) {
+	    if (assoc.isLinkCorrect(fragment, nonFragment, m_event)) {
+		return (BasicCluster) nonFragment;
+	    }
+	}
+
+	HitCountAssociator assocE = new HitCountAssociator(m_event);
+	assocE.setClusters(nonFragments);
+
+	// Hm, OK. What's our dominant particle?
+	List<MCParticle> tmpAssocMC = assocE.associateClusterToMCParticles(fragment);
+	if (tmpAssocMC.size()>0) {
+	    MCParticle alpha = tmpAssocMC.iterator().next();
+	    List<Cluster> tmpAssocClus = assocE.associateMCParticleToClusters(alpha);
+	    if (tmpAssocClus.size()>0) {
+		Cluster match = tmpAssocClus.iterator().next();
+		if (match == fragment) { throw new AssertionError("BUG"); }
+		if (match == null) { 
+		    throw new AssertionError("BUG");
+		} else {
+		    return (BasicCluster) match;
+		}
+	    } else {
+		// No good match in non-fragments. This can happen in the following kind of case:
+		//   Cluster A has  2 hits from particle X
+		//   Cluster B has  3 hits from particle X and 6 hits from particle Y
+		//   Cluster C has 30 hits from particle Y
+		// In this example, the dominant cluster for particle X is still marked as a fragment,
+		// so we don't find a non-fragment to match cluster A up with. So what we do is look
+		// for the best non-fragment, then hook cluster A up with whatever is the best merge
+		// for the dominant non-fragment. (Need to watch out for a circular loop here.)
+		// System.out.println("DEBUG: Didn't find a match in non-fragments for cluster ["+fragment+"] with "+fragment.getCalorimeterHits().size()+" hits. There are "+nonFragments.size()+" non-fragments and "+fragments.size()+" fragments to consider. Looking at fragments...");
+		HitCountAssociator assocE2 = new HitCountAssociator(m_event);
+		assocE2.setClusters(fragments);
+		List<MCParticle> tmpAssocMC2 = assocE2.associateClusterToMCParticles(fragment);
+		if (tmpAssocMC2.size()>0) {
+		    MCParticle alpha2 = tmpAssocMC2.iterator().next();
+		    List<Cluster> tmpAssocClus2 = assocE2.associateMCParticleToClusters(alpha2);
+		    if (tmpAssocClus2.size()>0) {
+			Cluster match = tmpAssocClus2.iterator().next();
+			if (match == fragment) { throw new AssertionError("BUG"); }
+			if (match==null) {
+			    throw new AssertionError("BUG");
+			} else {
+			    // Found the best fragment -- check what it's linked to:
+			    // System.out.println("DEBUG: Found a matching fragment for cluster ["+fragment+"] with "+fragment.getCalorimeterHits().size()+" hits: ["+match+"] with "+match.getCalorimeterHits().size()+" hits. Look up its associated cluster:");
+			    BasicCluster matchParent = findBestMerge(match, nonFragments, fragments);
+			    // System.out.println("DEBUG: Found a matching fragment for cluster ["+fragment+"] with "+fragment.getCalorimeterHits().size()+" hits: ["+match+"] with "+match.getCalorimeterHits().size()+" hits. Associated cluster/parent is ["+matchParent+"] with "+matchParent.getCalorimeterHits().size()+" hits.");
+			    return matchParent;
+			}
+		    }
+		}
+	    }
+	} else {
+	    throw new AssertionError("No MC particles contribute to cluster!");
+	}
+
+	/*
+	  System.out.println("DEBUG: First pass found no good matches for fragment ["+fragment+"] with "+fragment.getCalorimeterHits().size()+" hits.");
+	  System.out.println("DEBUG: Dumping contributing MC particles:");
+	  List<MCParticle> particles = assocE.associateClusterToMCParticles(fragment);
+	  for (MCParticle part : particles) {
+	  System.out.println("DEBUG:     "+part.getType().getName()+" with energy "+part.getEnergy());
+	  boolean flag = false;
+	  for (Cluster otherClus : nonFragments) {
+	  List<MCParticle> particles2 = assocE.associateClusterToMCParticles(otherClus);
+	  if (particles2.contains(part)) {
+	  MCParticle reverseAssoc = particles2.iterator().next();
+	  System.out.println("DEBUG:     Cluster ["+otherClus+"] with "+otherClus.getCalorimeterHits().size()+" contains the "+part.getType().getName()+" with energy "+part.getEnergy()+" but primary assoc is to "
+	  +reverseAssoc.getType().getName()+" with energy "+reverseAssoc.getEnergy());
+	  
+	  flag = true;
+	  }
+	  }
+	  if (!flag) {
+	  System.out.println("DEBUG:     ... not contained by any other clusters.");
+	  }
+	  }
+	*/
+
+
+	//throw new AssertionError("["+fragment+"] with alpha particle "+alpha.getType().getName()+" of energy "+alpha.getEnergy()+" is a fragment, but nothing matches! There were "+nonFragments.size()+" non-fragments and "+fragments.size()+" fragments.");
+
+	return null;
+    }
+}
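
CheatFragmentMerger only overrides findBestMerge and inherits all of the driver plumbing from FragmentMerger (defined further down), so wiring it up is a one-liner, assuming the usual Driver.add(Driver) composition. Note that the identifier must be built with the same list name as the merger's input, since CheatFragmentIdentifier asserts that each cluster it is asked about belongs to its list. A sketch with placeholder names:

    // Inside a parent Driver's constructor -- sketch only.
    FragmentIdentifier cheatID = new CheatFragmentIdentifier("RefinedClusters");
    add(new CheatFragmentMerger("RefinedClusters", "CheatMergedClusters", cheatID));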

lcsim/src/org/lcsim/contrib/uiowa/structural
CheckStatusOfClusterList.java added at 1.1
diff -N CheckStatusOfClusterList.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheckStatusOfClusterList.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,99 @@
+package structural; // package org.lcsim.recon.cluster.structural;
+
+import java.util.List;
+import java.util.Vector;
+import java.util.Map;
+import java.util.HashMap;
+import java.lang.String;
+import java.util.Set;
+import java.util.HashSet;
+
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Vector;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.Track;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.CalorimeterHit;
+
+public class CheckStatusOfClusterList extends Driver
+{
+    public CheckStatusOfClusterList(String inputClusterListName, List<String> knownClusterLists)
+    {
+	m_inputClusterListName = inputClusterListName;
+	m_knownClusterLists = knownClusterLists;
+    }
+
+    public void process(EventHeader event) 
+    {
+	// Read in the clusters:
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+
+	int hitSum = 0;
+	double energySum = 0.0;
+	double hitRawEnergySum = 0.0;
+	double hitCorrectedEnergySum = 0.0;
+	Set<CalorimeterHit> usedHits = new HashSet<CalorimeterHit>();
+	Set<CalorimeterHit> duplicateHits = new HashSet<CalorimeterHit>();
+	Map<CalorimeterHit, List<Cluster>> duplicateHitOrigins = new HashMap<CalorimeterHit, List<Cluster>>();
+	for (Cluster currentCluster : inputClusterList) {
+	    hitSum += currentCluster.getCalorimeterHits().size();
+	    energySum += currentCluster.getEnergy();
+	    for (CalorimeterHit hit : currentCluster.getCalorimeterHits()) {
+		hitCorrectedEnergySum += hit.getCorrectedEnergy();
+		hitRawEnergySum += hit.getRawEnergy();
+		if (usedHits.contains(hit)) {
+		    duplicateHits.add(hit);
+		} else {
+		    usedHits.add(hit);
+		}
+	    }
+	}
+	System.out.println("For cluster list ["+m_inputClusterListName+"]:"
+			   +" clusters="+inputClusterList.size()
+			   +" hits="+hitSum
+			   +" energySum="+energySum
+			   +" hitCorrectedEnergySum="+hitCorrectedEnergySum
+			   +" hitRawEnergySum="+hitRawEnergySum);
+
+	if (duplicateHits.size()>0) {
+	    System.out.println("There were "+duplicateHits.size()+" duplicate hits...");
+
+	    for (Cluster currentCluster : inputClusterList) {
+		for (CalorimeterHit hit : currentCluster.getCalorimeterHits()) {
+		    if (duplicateHits.contains(hit)) {
+			List<Cluster> test = duplicateHitOrigins.get(hit);
+			if (test == null) { test = new Vector<Cluster>(); duplicateHitOrigins.put(hit, test); }
+			test.add(currentCluster);
+		    }
+		}
+	    }
+	    
+	    // All known cluster lists:
+	    Map<List<Cluster>, String> knownClusterListMap = new HashMap<List<Cluster>, String>();
+	    for (String knownClusterListName : m_knownClusterLists) {
+		List<Cluster> knownClusterList = event.get(Cluster.class, knownClusterListName);
+		knownClusterListMap.put(knownClusterList, knownClusterListName);
+	    }
+	    Set<List<Cluster>> knownClusterListSet = knownClusterListMap.keySet();
+	    
+	    for (CalorimeterHit hit : duplicateHits) {
+		for (Cluster cluster : duplicateHitOrigins.get(hit)) {
+		    String clusterContainers = new String();
+		    for (List<Cluster> knownClusterList : knownClusterListSet) {
+			if (knownClusterList.contains(cluster)) {
+			    String clusterListName = knownClusterListMap.get(knownClusterList);
+			    clusterContainers += clusterListName + "; ";
+			}
+		    }
+		    System.out.println("Hit ["+hit+"] <--- cluster ["+cluster+"] with "+cluster.getCalorimeterHits().size()+" hits, in "+clusterContainers);
+		}
+	    }
+	}
+
+    }
+    String m_inputClusterListName;
+    List<String> m_knownClusterLists;
+}
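
CheckStatusOfClusterList tallies cluster counts, hit counts, and energy sums for one list and, when duplicate hits show up, reports which of the known lists each offending cluster lives in. A minimal wiring sketch with placeholder list names (again assuming the usual Driver.add composition):

    List<String> knownLists = new Vector<String>();
    knownLists.add("RefinedClusters");
    knownLists.add("CheatMergedClusters");
    add(new CheckStatusOfClusterList("CheatMergedClusters", knownLists));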

lcsim/src/org/lcsim/contrib/uiowa/structural
CheckStatusOfHitList.java added at 1.1
diff -N CheckStatusOfHitList.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ CheckStatusOfHitList.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,43 @@
+package structural; // package org.lcsim.recon.cluster.structural;
+
+import java.util.List;
+import java.util.Map;
+import java.lang.String;
+
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Vector;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.Track;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.CalorimeterHit;
+
+public class CheckStatusOfHitList extends Driver
+{
+    public CheckStatusOfHitList(String inputHitListName)
+    {
+	m_inputHitListName = inputHitListName;
+    }
+
+    public void process(EventHeader event) 
+    {
+	// Read in the clusters:
+	List<CalorimeterHit> inputHitList = event.get(CalorimeterHit.class, m_inputHitListName);
+
+	int hitSum = 0;
+	double hitRawEnergySum = 0.0;
+	double hitCorrectedEnergySum = 0.0;
+	for (CalorimeterHit hit : inputHitList) {
+	    hitSum += 1;
+	    hitCorrectedEnergySum += hit.getCorrectedEnergy();
+	    hitRawEnergySum += hit.getRawEnergy();
+	}
+	System.out.println("For hit list ["+m_inputHitListName+"]:"
+			   +" hits="+hitSum
+			   +" hitCorrectedEnergySum="+hitCorrectedEnergySum
+			   +" hitRawEnergySum="+hitRawEnergySum);
+    }
+    String m_inputHitListName;
+}

lcsim/src/org/lcsim/contrib/uiowa/structural
EventEnergySum.java added at 1.1
diff -N EventEnergySum.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ EventEnergySum.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,565 @@
+package structural; // package org.lcsim.recon.cluster.structural;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Vector;
+import java.lang.String;
+import java.io.IOException; 
+
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Vector;
+
+import hep.aida.ITree;
+import hep.aida.IAnalysisFactory; 
+import hep.aida.IHistogramFactory; 
+import hep.aida.IHistogram1D; 
+import hep.aida.ICloud1D;
+import hep.aida.ITuple;
+import hep.aida.ITupleFactory;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.Track;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.CalorimeterHit;
+
+import util.EnergyAssociator;
+import util.OldEnergyCalibration;
+import util.RonEnergyCalibration;
+
+/** 
+ * A simple class to print out the total energy in an event.
+ */
+
+public class EventEnergySum extends Driver
+{
+    public EventEnergySum(String inputClusterListName, String nameOfClusterToInfoMap, String nameOfHelixToClusterMap)
+    {
+	m_inputClusterListName = inputClusterListName;
+	m_nameOfClusterToInfoMap = nameOfClusterToInfoMap;
+	m_nameOfHelixToClusterMap = nameOfHelixToClusterMap;
+
+	IAnalysisFactory af = IAnalysisFactory.create();
+	try {
+            m_tree = af.createTreeFactory().create("EnergySumHistos.aida","xml",false,true); 
+	    m_histoFactory = af.createHistogramFactory(m_tree); 
+	    m_hHadHitEnergy = m_histoFactory.createHistogram1D("hHadHitEnergy", 1000, 0.0, 0.00005);
+	    m_hHadEndcapHitEnergy = m_histoFactory.createHistogram1D("hHadEndcapHitEnergy", 1000, 0.0, 0.00005);
+	    m_hHadBarrelHitEnergy = m_histoFactory.createHistogram1D("hHadBarrelHitEnergy", 1000, 0.0, 0.00005);
+	    m_hTotalEnergyRon = m_histoFactory.createCloud1D("hTotalEnergyRon");
+	    m_hTotalEnergyOld = m_histoFactory.createCloud1D("hTotalEnergyOld");
+	    m_hTotalEnergyRawRon = m_histoFactory.createCloud1D("hTotalEnergyRawRon");
+	    m_hTotalEnergyRawOld = m_histoFactory.createCloud1D("hTotalEnergyRawOld");
+	    m_hSampFrac = m_histoFactory.createCloud1D("hSampFrac");
+	    m_hHadTime = m_histoFactory.createCloud1D("hHadTime");
+	    m_hECAL_charged_track_to_track     = m_histoFactory.createCloud1D("hECAL_charged_track_to_track");
+	    m_hECAL_charged_track_to_notrack   = m_histoFactory.createCloud1D("hECAL_charged_track_to_notrack");
+	    m_hECAL_charged_notrack_to_track   = m_histoFactory.createCloud1D("hECAL_charged_notrack_to_track");
+	    m_hECAL_charged_notrack_to_notrack = m_histoFactory.createCloud1D("hECAL_charged_notrack_to_notrack");
+	    m_hHCAL_charged_track_to_track     = m_histoFactory.createCloud1D("hHCAL_charged_track_to_track");
+	    m_hHCAL_charged_track_to_notrack   = m_histoFactory.createCloud1D("hHCAL_charged_track_to_notrack");
+	    m_hHCAL_charged_notrack_to_track   = m_histoFactory.createCloud1D("hHCAL_charged_notrack_to_track");
+	    m_hHCAL_charged_notrack_to_notrack = m_histoFactory.createCloud1D("hHCAL_charged_notrack_to_notrack");
+	    m_hECAL_neutral_to_track           = m_histoFactory.createCloud1D("hECAL_neutral_to_track");
+	    m_hECAL_neutral_to_notrack         = m_histoFactory.createCloud1D("hECAL_neutral_to_notrack");
+	    m_hHCAL_neutral_to_track           = m_histoFactory.createCloud1D("hHCAL_neutral_to_track");
+	    m_hHCAL_neutral_to_notrack         = m_histoFactory.createCloud1D("hHCAL_neutral_to_notrack");
+
+	    m_hECAL_charged_track_fractionbad   = m_histoFactory.createCloud1D("hECAL_charged_track_fractionbad");
+	    m_hECAL_charged_notrack_fractionbad = m_histoFactory.createCloud1D("hECAL_charged_notrack_fractionbad");
+	    m_hECAL_neutral_fractionbad         = m_histoFactory.createCloud1D("hECAL_neutral_fractionbad");
+	    m_hHCAL_charged_track_fractionbad   = m_histoFactory.createCloud1D("hHCAL_charged_track_fractionbad");
+	    m_hHCAL_charged_notrack_fractionbad = m_histoFactory.createCloud1D("hHCAL_charged_notrack_fractionbad");
+	    m_hHCAL_neutral_fractionbad         = m_histoFactory.createCloud1D("hHCAL_neutral_fractionbad");
+
+
+	    ITupleFactory tf = af.createTupleFactory(m_tree);
+	    m_hFractionChargedHitsBad = m_histoFactory.createCloud1D("hFractionChargedHitsBad");
+	    m_hFractionNeutralHitsBad = m_histoFactory.createCloud1D("hFractionNeutralHitsBad");
+	    m_tuple = tf.create("EnergySums", "sums", "double truth, recoRon, neutrinos, recoOld");
+	} catch (IOException ioe1) {
+            ioe1.printStackTrace(); 
+        }
+    }
+
+    ITree m_tree = null;
+    IHistogramFactory m_histoFactory = null;
+    IHistogram1D m_hHadHitEnergy;
+    IHistogram1D m_hHadEndcapHitEnergy;
+    IHistogram1D m_hHadBarrelHitEnergy;
+    ICloud1D m_hSampFrac;
+    ICloud1D m_hTotalEnergyRon;
+    ICloud1D m_hTotalEnergyOld;
+    ICloud1D m_hTotalEnergyRawRon;
+    ICloud1D m_hTotalEnergyRawOld;
+    ICloud1D m_hHadTime;
+    ICloud1D m_hFractionChargedHitsBad;
+    ICloud1D m_hFractionNeutralHitsBad;
+    ITuple   m_tuple;
+
+    ICloud1D m_hECAL_charged_track_to_track     ;
+    ICloud1D m_hECAL_charged_track_to_notrack   ;
+    ICloud1D m_hECAL_charged_notrack_to_track   ;
+    ICloud1D m_hECAL_charged_notrack_to_notrack ;
+    ICloud1D m_hHCAL_charged_track_to_track     ;
+    ICloud1D m_hHCAL_charged_track_to_notrack   ;
+    ICloud1D m_hHCAL_charged_notrack_to_track   ;
+    ICloud1D m_hHCAL_charged_notrack_to_notrack ;
+    ICloud1D m_hECAL_neutral_to_track           ;
+    ICloud1D m_hECAL_neutral_to_notrack         ;
+    ICloud1D m_hHCAL_neutral_to_track           ;
+    ICloud1D m_hHCAL_neutral_to_notrack         ;
+
+    ICloud1D m_hECAL_charged_track_fractionbad   ;
+    ICloud1D m_hECAL_charged_notrack_fractionbad ;
+    ICloud1D m_hECAL_neutral_fractionbad         ;
+    ICloud1D m_hHCAL_charged_track_fractionbad   ;
+    ICloud1D m_hHCAL_charged_notrack_fractionbad ;
+    ICloud1D m_hHCAL_neutral_fractionbad         ;
+
+    public void suspend() {
+	try {
+	    m_tree.commit();
+	} catch(IOException ioe1) {
+            ioe1.printStackTrace(); 
+        }
+	super.suspend();
+    }
+
+    public void process(EventHeader event) 
+    {
+	m_event = event;
+
+	// Read in the clusters:
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+	// Read in the track association map:
+	Map<Cluster, List<TrackExtrapolationInfo>> trackMap = null;
+	{
+	    List<Object> trackMapObjectList = event.get(Object.class, m_nameOfClusterToInfoMap);
+	    Object trackMapObject = trackMapObjectList.iterator().next();
+	    trackMap = (Map<Cluster, List<TrackExtrapolationInfo>>) (trackMapObject);
+	}
+	Map<TrackExtrapolationInfo, Cluster> helixMap = null;
+	{
+	    List<Object> trackMapObjectList = event.get(Object.class, m_nameOfHelixToClusterMap);
+	    Object trackMapObject = trackMapObjectList.iterator().next();
+	    helixMap = (Map<TrackExtrapolationInfo, Cluster>) (trackMapObject);
+	}
+
+	EnergyAssociator assoc = new EnergyAssociator(event);
+
+	int totalChargedHitsSafe = 0;
+	int totalChargedHitsBad = 0;
+	int totalNeutralHitsSafe = 0;
+	int totalNeutralHitsBad = 0;
+
+	// Categories:
+	//   From a charged particle with a  track, in a cluster with a  track
+	//   From a charged particle with no track, in a cluster with a  track
+	//   From a charged particle with a  track, in a cluster with no track
+	//   From a charged particle with no track, in a cluster with no track
+	//   From a neutral particle, in a cluster with a  track
+	//   From a neutral particle, in a cluster with no track
+
+	double energyECAL_ChargedParticleWithTrack_ClusterWithTrack = 0.0;
+	double energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack = 0.0;
+	double energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack = 0.0;
+	double energyECAL_ChargedParticleWithNoTrack_ClusterWithNoTrack = 0.0;
+	double energyECAL_NeutralParticle_ClusterWithTrack = 0.0;
+	double energyECAL_NeutralParticle_ClusterWithNoTrack = 0.0;
+	int hitsHCAL_ChargedParticleWithTrack_ClusterWithTrack = 0;
+	int hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack = 0;
+	int hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack = 0;
+	int hitsHCAL_ChargedParticleWithNoTrack_ClusterWithNoTrack = 0;
+	int hitsHCAL_NeutralParticle_ClusterWithTrack = 0;
+	int hitsHCAL_NeutralParticle_ClusterWithNoTrack = 0;
+
+	int countChargedClusters = 0;
+	int countNeutralClusters = 0;
+	double energySumOldECAL = 0.0; // Neutral energy in ECAL (old)
+	double energySumOldHCAL = 0.0; // Neutral energy in HCAL (old)
+	double energySumRon = 0.0; // Neutral energy (ECAL + HCAL) (Ron)
+	double energySumCharged = 0.0;
+	double energySumRawChargedOld = 0.0; // Raw charged energy (old)
+	double energySumRawChargedRon = 0.0; // Raw charged energy (Ron)
+	int countFoundTracks = 0;
+	Set<TrackExtrapolationInfo> helixSet = helixMap.keySet();
+	for (Cluster currentCluster : inputClusterList) {
+	    // Is there an associated track?
+	    List<TrackExtrapolationInfo> associatedTracks = new Vector<TrackExtrapolationInfo>();
+
+	    /*
+	      List<TrackExtrapolationInfo> associatedTrackList = trackMap.get(currentCluster);
+	      if (associatedTrackList != null) { associatedTracks.addAll(associatedTrackList); }
+	    */
+
+	    for (TrackExtrapolationInfo helix : helixSet) {
+		CalorimeterHit anchor = helix.getHit();
+		List<CalorimeterHit> hitsInCluster = currentCluster.getCalorimeterHits();
+		if (hitsInCluster.contains(anchor)) {
+		    associatedTracks.add(helix);
+		}
+	    }
+
+	    if (associatedTracks.size() > 0) {
+		// There are associated track(s)
+		countChargedClusters++;
+		countFoundTracks += associatedTracks.size();
+		double sumEnergyOfThisCluster = 0.0;
+		double sumCrudeEnergyOfThisCluster = 0.0;
+		for (TrackExtrapolationInfo associatedTrack : associatedTracks) {
+		    MCParticle part = associatedTrack.getMCParticle();
+		    double mass = part.getMass();
+		    Hep3Vector momentumVector = associatedTrack.getMomentum();
+		    double momentumSquared = momentumVector.magnitudeSquared();
+		    double energy = Math.sqrt(mass*mass + momentumSquared);
+		    energySumCharged += energy;	
+		    sumEnergyOfThisCluster += energy;
+		    sumCrudeEnergyOfThisCluster += this.getClusterEnergy(currentCluster);
+		}
+		energySumRawChargedOld += this.getClusterEnergy(currentCluster, true, false); // farm out to an external class?
+		energySumRawChargedOld += this.getClusterEnergy(currentCluster, false, true); // farm out to an external class?
+		energySumRawChargedRon += this.getClusterEnergy(currentCluster);
+		{
+		    String infoDump = new String("DEBUG: "+this.getClass().getName()+": Cluster with "+currentCluster.getCalorimeterHits().size()+" hits mapped to "+associatedTracks.size()+" tracks (");
+		    for (TrackExtrapolationInfo info : associatedTracks) {
+			infoDump += info.getMCParticle().getType().getName();
+			infoDump += "(";
+			infoDump += debugSimulationStatus(info.getMCParticle());
+			infoDump += " from ";
+			infoDump += debugGetParents(info.getMCParticle());
+			if (info.getMCParticle().getSimulatorStatus().isDecayedInTracker()) {
+			    Hep3Vector endPoint = info.getMCParticle().getEndPoint();
+			    double r = Math.sqrt(endPoint.x()*endPoint.x() + endPoint.y()*endPoint.y());
+			    double z = endPoint.z();
+			    infoDump += ", endpoint r=" + r + ", z=" + z;
+			    List<MCParticle> daughters = info.getMCParticle().getDaughters();
+			    for (MCParticle dau : daughters) {
+				endPoint = dau.getOrigin();
+				r = Math.sqrt(endPoint.x()*endPoint.x() + endPoint.y()*endPoint.y());
+				z = endPoint.z();
+				infoDump += ", dauOrig r=" + r + ", z=" + z;
+			    }
+			}
+			infoDump += ") ";
+		    }
+		    infoDump += "). Crude energy="+sumCrudeEnergyOfThisCluster+"; track energy="+sumEnergyOfThisCluster;
+		    System.out.println(infoDump);
+		    String truthDump = new String("DEBUG: "+this.getClass().getName()+": Here are all contributing particles: ");
+		    List<MCParticle> contributingParticles = assoc.associateClusterToMCParticles(currentCluster);
+		    int hitCountChargedButNoTrack = 0;
+		    int hitCountChargedWithTrack = 0;
+		    int hitCountNeutral = 0;
+
+		    for (MCParticle truthPart : contributingParticles) {
+			if (Math.abs(truthPart.getCharge())>0.5) {
+			    // charged
+			    boolean matchedTrack = false;
+			    for (TrackExtrapolationInfo helix : helixSet) {
+				if (helix.getMCParticle() == truthPart && helix.getHit() != null) {
+				    matchedTrack = true;
+				    break;
+				}
+			    }
+			    if (matchedTrack) {
+				totalChargedHitsSafe        += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+				hitCountChargedWithTrack    += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+
+				energyECAL_ChargedParticleWithTrack_ClusterWithTrack += MiscUtilities.correctedEnergyInClusterECAL(truthPart, currentCluster);
+				hitsHCAL_ChargedParticleWithTrack_ClusterWithTrack   += MiscUtilities.countHitsInClusterHCAL(truthPart, currentCluster);
+			    } else {
+				hitCountChargedButNoTrack   += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+				totalChargedHitsBad         += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+
+				energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack += MiscUtilities.correctedEnergyInClusterECAL(truthPart, currentCluster);
+				hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack   += MiscUtilities.countHitsInClusterHCAL(truthPart, currentCluster);
+			    }
+			} else {
+			    // neutral
+			    hitCountNeutral          += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+			    totalChargedHitsBad      += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+
+			    energyECAL_NeutralParticle_ClusterWithTrack += MiscUtilities.correctedEnergyInClusterECAL(truthPart, currentCluster);
+			    hitsHCAL_NeutralParticle_ClusterWithTrack   += MiscUtilities.countHitsInClusterHCAL(truthPart, currentCluster);
+			}
+			truthDump += truthPart.getType().getName();
+			truthDump += " (";
+			truthDump += truthPart.getEnergy();
+			truthDump += " -> ";
+			truthDump += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+			truthDump += "/";
+			truthDump += currentCluster.getCalorimeterHits().size();
+			truthDump += " -> ";
+			truthDump += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+			truthDump += "/";
+			truthDump += assoc.associateMCParticleToHits(truthPart).size();
+			truthDump += debugSimulationStatus(truthPart);
+			truthDump += " from ";
+			truthDump += debugGetParents(truthPart);
+			truthDump += ") ";
+		    }
+		    truthDump += " *** Hits: "+hitCountNeutral+" neutral, "+hitCountChargedButNoTrack+" charged(bad), "+hitCountChargedWithTrack+" charged(safe)";
+		    System.out.println(truthDump);
+		}
+	    } else {
+		// No track
+		countNeutralClusters++;
+		double energyOldECAL = this.getClusterEnergy(currentCluster, true, false);
+		double energyOldHCAL = this.getClusterEnergy(currentCluster, false, true);
+		energySumOldECAL += energyOldECAL;
+		energySumOldHCAL += energyOldHCAL;
+		energySumRon += this.getClusterEnergy(currentCluster);
+		System.out.println("DEBUG: "+this.getClass().getName()+": Neutral cluster with "+currentCluster.getCalorimeterHits().size()+" hits contributes "+this.getClusterEnergy(currentCluster)+" (ron) or "+energyOldECAL+" (old ECAL) + "+energyOldHCAL+" (old HCAL)");
+		String truthDump = new String("DEBUG: "+this.getClass().getName()+": Here are all contributing particles: ");
+		List<MCParticle> contributingParticles = assoc.associateClusterToMCParticles(currentCluster);
+		int hitCountChargedButNoTrack = 0;
+		int hitCountChargedWithTrack = 0;
+		int hitCountNeutral = 0;
+		double energyNeutralButNoTrackECAL = 0.0;
+		double energyChargedWithTrackECAL = 0.0;
+		double energyChargedNeutralECAL = 0.0;
+		int hitsChargedButNoTrackHCAL = 0;
+		int hitsChargedWithTrackHCAL = 0;
+		int hitsChargedNeutralHCAL = 0;
+		for (MCParticle truthPart : contributingParticles) {
+		    if (Math.abs(truthPart.getCharge())>0.5) {
+			// charged
+			boolean matchedTrack = false;
+			for (TrackExtrapolationInfo helix : helixSet) {
+			    if (helix.getMCParticle() == truthPart && helix.getHit() != null) {
+				matchedTrack = true;
+				break;
+			    }
+			}
+			if (matchedTrack) {
+			    hitCountChargedWithTrack += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+			    totalNeutralHitsBad += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+
+			    energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack += MiscUtilities.correctedEnergyInClusterECAL(truthPart, currentCluster);
+			    hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack   += MiscUtilities.countHitsInClusterHCAL(truthPart, currentCluster);
+			} else {
+			    hitCountChargedButNoTrack += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+			    totalNeutralHitsSafe += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+
+			    energyECAL_ChargedParticleWithNoTrack_ClusterWithNoTrack += MiscUtilities.correctedEnergyInClusterECAL(truthPart, currentCluster);
+			    hitsHCAL_ChargedParticleWithNoTrack_ClusterWithNoTrack   += MiscUtilities.countHitsInClusterHCAL(truthPart, currentCluster);
+			}
+		    } else {
+			// neutral
+			hitCountNeutral += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+			totalNeutralHitsSafe += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+
+			energyECAL_NeutralParticle_ClusterWithNoTrack += MiscUtilities.correctedEnergyInClusterECAL(truthPart, currentCluster);
+			hitsHCAL_NeutralParticle_ClusterWithNoTrack   += MiscUtilities.countHitsInClusterHCAL(truthPart, currentCluster);
+		    }
+		    truthDump += truthPart.getType().getName();
+		    truthDump += " (";
+		    truthDump += truthPart.getEnergy();
+		    truthDump += " -> ";
+		    truthDump += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+		    truthDump += "/";
+		    truthDump += currentCluster.getCalorimeterHits().size();
+		    truthDump += " -> ";
+		    truthDump += MiscUtilities.countHitsInCluster(truthPart, currentCluster);
+		    truthDump += "/";
+		    truthDump += assoc.associateMCParticleToHits(truthPart).size();
+		    truthDump += debugSimulationStatus(truthPart);
+		    truthDump += " from ";
+		    truthDump += debugGetParents(truthPart);
+		    truthDump += ")";
+		}
+		truthDump += " *** Hits: "+hitCountNeutral+" neutral, "+hitCountChargedButNoTrack+" charged(safe), "+hitCountChargedWithTrack+" charged(bad)";
+		System.out.println(truthDump);
+	    }
+	}
+
+	// Check neutrinos:
+        double truthNeutrinoEnergySum = 0.0;
+	double truthTotalEnergySum = 0.0;
+        List<MCParticle> eventMCParticles = event.getMCParticles();
+	MCParticle lastMCParticle = null;
+	for (MCParticle p : eventMCParticles) {
+            int pdg = p.getPDGID();
+            if (pdg==12 || pdg==14 || pdg==16 || pdg==18 || pdg==-12 || pdg==-14 || pdg==-16 || pdg==-18) {
+                truthNeutrinoEnergySum += p.getEnergy();
+            }
+	    lastMCParticle = p;
+        }
+
+	// done
+	double fracChargedHitsBad = ( (double)(totalChargedHitsBad) / (double)(totalChargedHitsSafe+totalChargedHitsBad));
+	double fracNeutralHitsBad = ( (double)(totalNeutralHitsBad) / (double)(totalNeutralHitsSafe+totalNeutralHitsBad));
+	System.out.println("In this event, charged hits: "+totalChargedHitsSafe+" safe and "+totalChargedHitsBad+" bad.");
+	System.out.println("In this event, neutral hits: "+totalNeutralHitsSafe+" safe and "+totalNeutralHitsBad+" bad.");
+	m_hFractionChargedHitsBad.fill(fracChargedHitsBad);
+	m_hFractionNeutralHitsBad.fill(fracNeutralHitsBad);
+	System.out.println("In this event, ECAL energy for charged particles with tracks: "
+			   +energyECAL_ChargedParticleWithTrack_ClusterWithTrack+" (particle with track -> cluster with track) "
+			   +energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack+" (particle with track -> cluster with no track) ");
+	System.out.println("In this event, ECAL energy for charged particles with no track: "
+			   +energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack+" (particle with no track -> cluster with track) "
+			   +energyECAL_ChargedParticleWithNoTrack_ClusterWithNoTrack+" (particle with no track -> cluster with no track) ");
+	System.out.println("In this event, ECAL energy for neutral particles: "
+			   +energyECAL_NeutralParticle_ClusterWithTrack+" (particle -> cluster with track) "
+			   +energyECAL_NeutralParticle_ClusterWithNoTrack+" (particle -> cluster with no track) ");
+	System.out.println("In this event, HCAL hits for charged particles with tracks: "
+			   +hitsHCAL_ChargedParticleWithTrack_ClusterWithTrack+" (particle with track -> cluster with track) "
+			   +hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack+" (particle with track -> cluster with no track) ");
+	System.out.println("In this event, HCAL hits for charged particles with no track: "
+			   +hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack+" (particle with no track -> cluster with track) "
+			   +hitsHCAL_ChargedParticleWithNoTrack_ClusterWithNoTrack+" (particle with no track -> cluster with no track) ");
+	System.out.println("In this event, HCAL hits for neutral particles: "
+			   +hitsHCAL_NeutralParticle_ClusterWithTrack+" (particle -> cluster with track) "
+			   +hitsHCAL_NeutralParticle_ClusterWithNoTrack+" (particle -> cluster with no track) ");
+	double badECAL_track_to_notrack = energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack;
+	double goodECAL_track_to_track = energyECAL_ChargedParticleWithTrack_ClusterWithTrack;
+	double badECAL_notrack_to_track = energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack + energyECAL_NeutralParticle_ClusterWithTrack;
+	double goodECAL_notrack_to_notrack = energyECAL_ChargedParticleWithNoTrack_ClusterWithNoTrack + energyECAL_NeutralParticle_ClusterWithNoTrack;
+	int badHCAL_track_to_notrack = hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack;
+	int goodHCAL_track_to_track = hitsHCAL_ChargedParticleWithTrack_ClusterWithTrack;
+	int badHCAL_notrack_to_track = hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack + hitsHCAL_NeutralParticle_ClusterWithTrack;
+	int goodHCAL_notrack_to_notrack = hitsHCAL_ChargedParticleWithNoTrack_ClusterWithNoTrack + hitsHCAL_NeutralParticle_ClusterWithNoTrack;
+	System.out.println("ECAL energy bad: "
+			   +badECAL_track_to_notrack+"/"+(badECAL_track_to_notrack+goodECAL_notrack_to_notrack)+" (seen as notrack) "
+			   +badECAL_notrack_to_track+"/"+(badECAL_notrack_to_track+goodECAL_track_to_track)+" (seen as track)");
+	System.out.println("HCAL hits bad: "
+			   +badHCAL_track_to_notrack+"/"+(badHCAL_track_to_notrack+goodHCAL_notrack_to_notrack)+" (seen as notrack) "
+			   +badHCAL_notrack_to_track+"/"+(badHCAL_notrack_to_track+goodHCAL_track_to_track)+" (seen as track)");
+	
+	
+	m_hECAL_charged_track_to_track     .fill(energyECAL_ChargedParticleWithTrack_ClusterWithTrack);
+	m_hECAL_charged_track_to_notrack   .fill(energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack);
+	m_hECAL_charged_notrack_to_track   .fill(energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack);
+	m_hECAL_charged_notrack_to_notrack .fill(energyECAL_ChargedParticleWithNoTrack_ClusterWithNoTrack);
+	m_hHCAL_charged_track_to_track     .fill(hitsHCAL_ChargedParticleWithTrack_ClusterWithTrack);
+	m_hHCAL_charged_track_to_notrack   .fill(hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack);
+	m_hHCAL_charged_notrack_to_track   .fill(hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack);
+	m_hHCAL_charged_notrack_to_notrack .fill(hitsHCAL_ChargedParticleWithNoTrack_ClusterWithNoTrack);
+	m_hECAL_neutral_to_track           .fill(energyECAL_NeutralParticle_ClusterWithTrack);
+	m_hECAL_neutral_to_notrack         .fill(energyECAL_NeutralParticle_ClusterWithNoTrack);
+	m_hHCAL_neutral_to_track           .fill(hitsHCAL_NeutralParticle_ClusterWithTrack);
+	m_hHCAL_neutral_to_notrack         .fill(hitsHCAL_NeutralParticle_ClusterWithNoTrack);
+
+	double fraction_ECAL_chargedtrack_bad = 0.0;
+	double fraction_ECAL_chargednotrack_bad = 0.0;
+	double fraction_ECAL_neutral_bad = 0.0;
+	double fraction_HCAL_chargedtrack_bad = 0.0;
+	double fraction_HCAL_chargednotrack_bad = 0.0;
+	double fraction_HCAL_neutral_bad = 0.0;
+
+	if (energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack>0.0) {
+	    fraction_ECAL_chargedtrack_bad = (energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack / (energyECAL_ChargedParticleWithTrack_ClusterWithTrack+energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack));
+	}
+	if (energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack>0.0) {
+	    fraction_ECAL_chargednotrack_bad =(energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack / (energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack+energyECAL_ChargedParticleWithNoTrack_ClusterWithNoTrack));
+	}
+	if (energyECAL_NeutralParticle_ClusterWithTrack>0.0) {
+	    fraction_ECAL_neutral_bad = (energyECAL_NeutralParticle_ClusterWithTrack / (energyECAL_NeutralParticle_ClusterWithTrack+energyECAL_NeutralParticle_ClusterWithNoTrack));
+	}
+	if (hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack > 0) {
+	    fraction_HCAL_chargedtrack_bad = ( ((double)(hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack)) / ((double)(hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack+hitsHCAL_ChargedParticleWithTrack_ClusterWithTrack)) );
+	}
+	if (hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack > 0) {
+	    fraction_HCAL_chargednotrack_bad = ( ((double)(hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack)) /  ((double)(hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack + hitsHCAL_ChargedParticleWithNoTrack_ClusterWithNoTrack)) );
+	}
+	if (hitsHCAL_NeutralParticle_ClusterWithTrack > 0) {
+	    fraction_HCAL_neutral_bad = ( ((double)(hitsHCAL_NeutralParticle_ClusterWithTrack)) /  ((double)(hitsHCAL_NeutralParticle_ClusterWithTrack + hitsHCAL_NeutralParticle_ClusterWithNoTrack)) );
+	}
+
+	m_hECAL_charged_track_fractionbad  .fill(fraction_ECAL_chargedtrack_bad);
+	m_hECAL_charged_notrack_fractionbad.fill(fraction_ECAL_chargednotrack_bad);
+	m_hECAL_neutral_fractionbad        .fill(fraction_ECAL_neutral_bad);
+	m_hHCAL_charged_track_fractionbad  .fill(fraction_HCAL_chargedtrack_bad);
+	m_hHCAL_charged_notrack_fractionbad.fill(fraction_HCAL_chargednotrack_bad);
+	m_hHCAL_neutral_fractionbad        .fill(fraction_HCAL_neutral_bad);
+
+	// Crosschecks
+	if ( Math.abs((badECAL_track_to_notrack+goodECAL_track_to_track+badECAL_notrack_to_track+goodECAL_notrack_to_notrack)-(energyECAL_ChargedParticleWithTrack_ClusterWithTrack+energyECAL_ChargedParticleWithTrack_ClusterWithNoTrack+energyECAL_ChargedParticleWithNoTrack_ClusterWithTrack+energyECAL_ChargedParticleWithNoTrack_ClusterWithNoTrack+energyECAL_NeutralParticle_ClusterWithTrack+energyECAL_NeutralParticle_ClusterWithNoTrack)) > 0.0001 ) { throw new AssertionError("bookkeeping"); }
+	if ( ((badHCAL_track_to_notrack+goodHCAL_track_to_track+badHCAL_notrack_to_track+goodHCAL_notrack_to_notrack)-(hitsHCAL_ChargedParticleWithTrack_ClusterWithTrack+hitsHCAL_ChargedParticleWithNoTrack_ClusterWithTrack+hitsHCAL_ChargedParticleWithTrack_ClusterWithNoTrack+hitsHCAL_ChargedParticleWithNoTrack_ClusterWithNoTrack+hitsHCAL_NeutralParticle_ClusterWithTrack+hitsHCAL_NeutralParticle_ClusterWithNoTrack)) != 0 ) { throw new AssertionError("bookkeeping"); }
+
+	double oldTotal = energySumCharged+energySumOldECAL+energySumOldHCAL+truthNeutrinoEnergySum;
+	double newTotal = energySumCharged+energySumRon+truthNeutrinoEnergySum;
+	System.out.println("Charged energy in event = "+energySumCharged+" from "+countFoundTracks+" tracks, "+countChargedClusters+" charged clusters.");
+	System.out.println("Neutral energy sum in event: old("+(energySumOldECAL+energySumOldHCAL)+"), new("+energySumRon+") and neutrinos="+truthNeutrinoEnergySum+" => total = old("+oldTotal+"), new("+newTotal+") from "+countNeutralClusters+" neutral clusters.");
+	double sampFracHCAL = (91.0 - energySumOldECAL - truthNeutrinoEnergySum - energySumCharged) / energySumOldHCAL;
+	//System.out.println("Sampling fraction in HCAL = "+sampFracHCAL);
+	m_hSampFrac.fill(sampFracHCAL);
+
+	m_hTotalEnergyRon.fill(newTotal);
+	m_hTotalEnergyOld.fill(oldTotal);
+	m_hTotalEnergyRawOld.fill(energySumRawChargedOld+energySumOldECAL+energySumOldHCAL+truthNeutrinoEnergySum);
+	m_hTotalEnergyRawRon.fill(energySumRawChargedRon+energySumRon+truthNeutrinoEnergySum);
+	m_tuple.fill(0, lastMCParticle.getEnergy());
+	m_tuple.fill(1, newTotal);
+	m_tuple.fill(2, truthNeutrinoEnergySum);
+	m_tuple.fill(3, oldTotal);
+	m_tuple.addRow();
+    }
+
+    protected double getClusterEnergy(Cluster clus, boolean useECAL, boolean useHCAL) 
+    {
+	// Use old calibration
+	OldEnergyCalibration cor = new OldEnergyCalibration();
+	if (useECAL && useHCAL) {
+	    return cor.energy(clus);
+	} else if (useECAL && !useHCAL) {
+	    return cor.energyECAL(clus);
+	} else if (useHCAL && !useECAL) {
+	    return cor.energyHCAL(clus);
+	} else {
+	    throw new AssertionError("duh");
+	}
+    }
+
+    protected double getClusterEnergy(Cluster currentCluster) {
+	// Use Ron's calibration
+	RonEnergyCalibration cor = new RonEnergyCalibration(m_event);
+	return cor.energy(currentCluster);
+    }
+
+    String m_inputClusterListName;
+    String m_nameOfClusterToInfoMap;
+    String m_nameOfHelixToClusterMap;
+    EventHeader m_event;
+
+    private String debugGetParents(MCParticle part)  
+    {
+	String output = new String();
+	List<MCParticle> parents = part.getParents();
+	for (MCParticle parent : parents) {
+	    output += parent.getType().getName();
+	    output += " ";
+	}
+	return output;
+    }
+    private String debugSimulationStatus(MCParticle part) 
+    {
+	MCParticle.SimulatorStatus status = part.getSimulatorStatus();
+	String statusString = new String();
+	if (status.hasLeftDetector()) {
+	    statusString += "hasLeftDetector; ";
+	}
+	if (status.isBackscatter()) {
+	    statusString += "isBackScatter; ";
+	}
+	if (status.isCreatedInSimulation()) {
+	    statusString += "createdInSimulation; ";
+	}
+	if (status.isDecayedInCalorimeter()) {
+	    statusString += "decayedInCalorimeter; ";
+	}
+	if (status.isDecayedInTracker()) {
+	    statusString += "decayedInTracker; ";
+	}
+	if (status.isStopped()) {
+	    statusString += "stopped; ";
+	}
+	if (status.vertexIsNotEndpointOfParent()) {
+	    statusString += "vertexIsNotEndpointOfParent; ";
+	}
+	return statusString;
+    }
+}
+
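
EventEnergySum needs three collection names: the cluster list to sum, the cluster-to-TrackExtrapolationInfo map, and the helix-to-cluster map; histograms and the EnergySums tuple are written to EnergySumHistos.aida when suspend() is called. A construction sketch with placeholder names (the two maps would come from upstream drivers in this package):

    add(new EventEnergySum("RefinedClusters",              // cluster list to sum
                           "ClusterToExtrapolationInfo",   // Map<Cluster, List<TrackExtrapolationInfo>>
                           "HelixToClusterMap"));          // Map<TrackExtrapolationInfo, Cluster>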

lcsim/src/org/lcsim/contrib/uiowa/structural
FragmentIdentifier.java added at 1.1
diff -N FragmentIdentifier.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ FragmentIdentifier.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,20 @@
+package structural;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+
+/**
+ * Determine whether a cluster is a fragment or not.
+ *
+ * @version $Id: FragmentIdentifier.java,v 1.1 2005/12/16 21:11:39 mcharles Exp $
+ */
+
+public interface FragmentIdentifier 
+{
+    /**
+     * Attempt to determine whether the cluster is a fragment.
+     *
+     * @return true if we think it is a fragment, false if not.
+     */
+    public boolean isFragment(Cluster clus, EventHeader event);
+}
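
CheatFragmentIdentifier above is one implementation of this interface; a non-cheating identifier only needs to supply isFragment. A purely illustrative size-based implementation (not part of this commit), flagging any cluster below a hit-count threshold as a fragment:

    package structural;

    import org.lcsim.event.Cluster;
    import org.lcsim.event.EventHeader;

    // Illustrative sketch only: small clusters are treated as fragments.
    public class SmallClusterFragmentIdentifier implements FragmentIdentifier
    {
        public SmallClusterFragmentIdentifier(int minHits)
        {
            m_minHits = minHits;
        }

        public boolean isFragment(Cluster clus, EventHeader event)
        {
            return clus.getCalorimeterHits().size() < m_minHits;
        }

        protected int m_minHits;
    }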

lcsim/src/org/lcsim/contrib/uiowa/structural
FragmentMerger.java added at 1.1
diff -N FragmentMerger.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ FragmentMerger.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,132 @@
+package structural;
+
+import java.util.List;
+import java.util.Vector;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.recon.cluster.util.BasicCluster;
+
+public class FragmentMerger extends Driver
+{
+    public FragmentMerger(String inputClusterListName, String outputClusterListName, FragmentIdentifier fragmentID)
+    {
+	m_inputClusterListName = inputClusterListName;
+	m_outputClusterListName = outputClusterListName;
+	m_fragmentID = fragmentID;
+    }
+
+    public void process(EventHeader event) 
+    {
+	m_event = event;
+
+	// Get input list:
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+
+	// Sanity check: Input # hits
+	int inputHitCount = 0;
+	for (Cluster clus : inputClusterList) {
+	    inputHitCount += clus.getCalorimeterHits().size();
+	}
+
+	// For each cluster, check for fragment-ness:
+	List<Cluster> fragments = new Vector<Cluster>();
+	List<Cluster> nonFragments = new Vector<Cluster>();
+	for (Cluster clus : inputClusterList) {
+	    if (m_fragmentID.isFragment(clus, event)) {
+		fragments.add(clus);
+	    } else {
+		// Wrap it:
+		BasicCluster newClus = new BasicCluster();
+		newClus.addCluster(clus);
+		nonFragments.add(newClus);
+	    }
+	}
+
+	if (nonFragments.size() == 0 && fragments.size()>0) {
+	    // Need at least one non-fragment!
+	    // Find the biggest fragment and promote it.
+	    int biggestNumHits = 0;
+	    Cluster biggest = null;
+	    for (Cluster fragment : fragments) {
+		int nHits = fragment.getCalorimeterHits().size();
+		if (nHits > biggestNumHits) {
+		    biggest = fragment;
+		    biggestNumHits = nHits;
+		}
+	    }
+	    fragments.remove(biggest);
+	    // Wrap it:
+	    BasicCluster newClus = new BasicCluster();
+	    newClus.addCluster(biggest);
+	    nonFragments.add(newClus);
+	}
+
+	// Loop over fragments...
+	for (Cluster fragment : fragments) {
+	    BasicCluster target = findBestMerge(fragment, nonFragments, fragments);
+	    if (target==null) {
+		if (nonFragments.size()==0) {
+		    throw new AssertionError("BUG: Zero non-fragments but >0 fragments (should not happen by construction)");
+		} else {
+		    System.out.println("BUG: Was trying to link fragment ["+fragment+"] with "+fragment.getCalorimeterHits().size()+" hits. Dumping list of non-fragments:");
+		    for (Cluster nonfrag : nonFragments) {
+			System.out.println("BUG:     ["+nonfrag+"] with "+nonfrag.getCalorimeterHits().size()+" hits");
+		    }
+		    throw new AssertionError("BUG: There are non-fragments, but none was found");
+		}
+	    } else {
+		target.addCluster(fragment);
+	    }
+	}
+
+	// We'll output the non-fragments:
+	List<Cluster> outputClusterList = nonFragments;
+
+	// Sanity check: Output # hits
+	int outputHitCount = 0;
+	for (Cluster clus : outputClusterList) {
+	    outputHitCount += clus.getCalorimeterHits().size();
+	}
+	if (inputHitCount != outputHitCount) {
+	    throw new AssertionError("Mismatch in hit counts: input("+inputHitCount+") != output("+outputHitCount+")");
+	}
+
+	// Write out:
+	event.put(m_outputClusterListName, outputClusterList);
+    }
+
+    protected BasicCluster findBestMerge(Cluster fragment, List<Cluster> nonFragments, List<Cluster> fragments)
+    {
+	// This is kind of dumb.
+        // What's the nearest non-fragment?
+	Cluster nearest = null;
+	double minDistance = 0;
+	for (Cluster nonFragment : nonFragments) {
+	    double distance = MiscUtilities.distance(fragment, nonFragment);
+	    if (distance<minDistance || nearest==null) {
+                nearest = nonFragment;
+                minDistance = distance;
+	    }
+	}
+	if (nearest == null) {
+	    if (nonFragments.size() != 0) {
+		throw new AssertionError("BUG: There are non-fragments, but none is the nearest");
+	    } else {
+		return null;
+	    }
+	} else if (nearest instanceof BasicCluster) {
+	    return (BasicCluster) (nearest);
+	} else {
+	    throw new AssertionError("Can't handle cluster of class '"+nearest.getClass().getName()+"' which is not an instance of BasicCluster.");
+	}
+    }
+
+    protected String m_inputClusterListName;
+    protected String m_outputClusterListName;
+    protected FragmentIdentifier m_fragmentID;
+    protected EventHeader m_event;
+}
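
FragmentMerger's default findBestMerge simply picks the nearest non-fragment (via MiscUtilities.distance); subclasses such as CheatFragmentMerger above only replace that one hook, and the hit-count sanity check in process() guarantees no hits are lost in the merge. An illustrative subclass (not part of this commit) that merges each fragment into the most energetic non-fragment instead:

    package structural;

    import java.util.List;
    import org.lcsim.event.Cluster;
    import org.lcsim.recon.cluster.util.BasicCluster;

    // Illustrative sketch only: choose the highest-energy non-fragment as the merge target.
    public class HighestEnergyFragmentMerger extends FragmentMerger
    {
        public HighestEnergyFragmentMerger(String inputName, String outputName, FragmentIdentifier fragmentID)
        {
            super(inputName, outputName, fragmentID);
        }

        protected BasicCluster findBestMerge(Cluster fragment, List<Cluster> nonFragments, List<Cluster> fragments)
        {
            BasicCluster best = null;
            for (Cluster nonFragment : nonFragments) {
                if (best == null || nonFragment.getEnergy() > best.getEnergy()) {
                    best = (BasicCluster) nonFragment;   // non-fragments are wrapped as BasicCluster in process()
                }
            }
            return best;   // null only if there are no non-fragments at all
        }
    }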

lcsim/src/org/lcsim/contrib/uiowa/structural
FragmentRemover.java added at 1.1
diff -N FragmentRemover.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ FragmentRemover.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,51 @@
+package structural;
+
+import java.util.List;
+import java.util.Vector;
+
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.recon.cluster.util.BasicCluster;
+
+public class FragmentRemover extends Driver
+{
+    public FragmentRemover(String inputClusterListName, String outputClusterListName, FragmentIdentifier fragmentID)
+    {
+	m_inputClusterListName = inputClusterListName;
+	m_outputClusterListName = outputClusterListName;
+	m_fragmentID = fragmentID;
+    }
+
+    public void process(EventHeader event) 
+    {
+	// Get input list:
+	List<Cluster> inputClusterList = event.get(Cluster.class, m_inputClusterListName);
+
+	// For each cluster, check for fragment-ness:
+	List<Cluster> fragments = new Vector<Cluster>();
+	List<Cluster> nonFragments = new Vector<Cluster>();
+	for (Cluster clus : inputClusterList) {
+	    if (m_fragmentID.isFragment(clus, event)) {
+		fragments.add(clus);
+	    } else {
+		// Wrap it:
+		BasicCluster newClus = new BasicCluster();
+		newClus.addCluster(clus);
+		nonFragments.add(newClus);
+	    }
+	}
+
+	// We'll output the non-fragments:
+	List<Cluster> outputClusterList = nonFragments;
+
+	// Write out:
+	event.put(m_outputClusterListName, outputClusterList);
+    }
+
+    protected String m_inputClusterListName;
+    protected String m_outputClusterListName;
+    protected FragmentIdentifier m_fragmentID;
+}

lcsim/src/org/lcsim/contrib/uiowa/structural
HaloAssigner.java added at 1.1
diff -N HaloAssigner.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ HaloAssigner.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,117 @@
+package structural;
+
+import java.util.List;
+import java.util.Vector;
+import java.util.Set;
+import java.util.logging.Logger;
+import java.util.logging.Level;
+
+import hep.physics.vec.BasicHep3Vector;
+import hep.physics.vec.Hep3Vector;
+import hep.physics.vec.VecOp;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.recon.cluster.util.BasicCluster;
+
+/**
+ * This class scans a set of clusters. In the cases where those
+ * clusters have structure (i.e. a list of subclusters), it
+ * checks whether all hits in the parent cluster are assigned to
+ * a subcluster. If there are any unassigned hits, the class
+ * guesses the best match subcluster and adds the hits to it.
+ *
+ * Currently the matching algorithm is fixed; this should really
+ * be user-definable.
+ *
+ * @version $Id: HaloAssigner.java,v 1.1 2005/12/16 21:11:39 mcharles Exp $
+ */
+
+public class HaloAssigner extends Driver
+{
+    /**
+     * Constructor.
+     *
+     * @param inputMap A map from parent clusters to a list of subclusters within each parent cluster.
+     */
+    public HaloAssigner(String inputMap) 
+    {
+	m_inputMap = inputMap;
+    }
+
+    public void process(EventHeader event)
+    {
+	// Get the input map:
+	List<MapClusterToListOfClusters> inputMapList = event.get(MapClusterToListOfClusters.class, m_inputMap);
+	MapClusterToListOfClusters inputMap = inputMapList.iterator().next();
+	Set<Cluster> parentClusters = inputMap.keySet();
+	for (Cluster parentCluster : parentClusters) {
+	    List<Cluster> subClusters = inputMap.get(parentCluster);
+	    if (subClusters != null && subClusters.size()>0) {
+		// There is structure. Find whether there are unassigned hits:
+		List<CalorimeterHit> unassignedHits = new Vector<CalorimeterHit>();
+		unassignedHits.addAll(parentCluster.getCalorimeterHits());
+		for (Cluster subCluster : subClusters) {
+		    unassignedHits.removeAll(subCluster.getCalorimeterHits());
+		}
+		// Handle each unassigned hit:
+		for (CalorimeterHit hit : unassignedHits) {
+		    assignHit(hit, subClusters);
+		}
+	    }
+	}
+	// No need to write out, since we were editing a live copy
+    }
+
+    protected void assignHit(CalorimeterHit hit, List<Cluster> subClusters) 
+    {
+	Cluster bestMatch = null;
+	double minDistance = 0;
+	for (Cluster subCluster : subClusters) {
+	    double d = distance(subCluster, hit);
+	    if (bestMatch == null || d < minDistance) {
+		minDistance = d;
+		bestMatch = subCluster;
+	    }
+	}
+	if (bestMatch == null) {
+	    throw new AssertionError("No match");
+	} else if (! (bestMatch instanceof BasicCluster) ) {
+	    throw new AssertionError("Class mis-mismatch: "+bestMatch.getClass().getName()+" is not an instance of BasicCluster.");
+	} else {
+	    BasicCluster bestMatchBasicCluster = (BasicCluster) (bestMatch);
+	    bestMatchBasicCluster.addHit(hit);
+	}
+    }
+
+    // This belongs outside this class.
+    private double distance(Cluster clus, CalorimeterHit hit)
+    {
+	// Loop over hits...
+	boolean firstCheck = true;
+	double minDistance = Double.NaN; // Will stay NaN if clus is empty
+	List<CalorimeterHit> hits = clus.getCalorimeterHits();
+	for (CalorimeterHit hitInCluster : hits) {
+	    double dist = distance(hit, hitInCluster);
+	    if (firstCheck || dist<minDistance) {
+		minDistance = dist;
+		firstCheck = false;
+	    }
+	}
+
+	return minDistance;
+    }
+
+    // This belongs outside this class.
+    private double distance(CalorimeterHit hit1, CalorimeterHit hit2)
+    {
+	Hep3Vector vect1 = new BasicHep3Vector(hit1.getPosition());
+	Hep3Vector vect2 = new BasicHep3Vector(hit2.getPosition());
+	Hep3Vector displacement = VecOp.sub(vect1, vect2);
+	return displacement.magnitude();
+    }
+
+    protected String m_inputMap;
+}
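
As an illustrative sketch only (not part of the commit), the driver below shows the input contract HaloAssigner relies on: an earlier driver stores a MapClusterToListOfClusters in the event, wrapped in a List, under the name later passed to HaloAssigner. The collection names are placeholders, and MapClusterToListOfClusters is assumed to be a HashMap<Cluster, List<Cluster>> helper in the style of the other Map* classes in this package.

package structural;

import java.util.List;
import java.util.Vector;

import org.lcsim.event.Cluster;
import org.lcsim.event.EventHeader;
import org.lcsim.util.Driver;

public class ExampleSubclusterMapProducer extends Driver
{
    public void process(EventHeader event)
    {
        List<Cluster> parents = event.get(Cluster.class, "ParentClusters");
        MapClusterToListOfClusters map = new MapClusterToListOfClusters();
        for (Cluster parent : parents) {
            List<Cluster> subClusters = new Vector<Cluster>();
            // ... split the parent and fill subClusters here ...
            map.put(parent, subClusters);
        }
        // Wrap the map in a List so the event store can hold it:
        List<MapClusterToListOfClusters> wrapper = new Vector<MapClusterToListOfClusters>();
        wrapper.add(map);
        event.put("ParentToSubclusterMap", wrapper);
        // Downstream, add(new HaloAssigner("ParentToSubclusterMap")) mops up
        // any parent-cluster hits that no subcluster claimed.
    }
}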

lcsim/src/org/lcsim/contrib/uiowa/structural
MakeHelixSwimmersFromTruth.java added at 1.1
diff -N MakeHelixSwimmersFromTruth.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MakeHelixSwimmersFromTruth.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,85 @@
+package structural;
+
+import java.util.List;
+import java.util.Vector;
+import java.util.Map;
+
+import hep.physics.vec.*;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.geometry.Detector;
+import org.lcsim.util.swim.HelixSwimmer;
+import org.lcsim.event.MCParticle;
+
+/**
+ * This driver loops over a list of MC particles. For each, it makes a helix
+ * swimmer based on the truth information. The result is a
+ * Map<MCParticle, HelixSwimmer>. This is written as the first element of
+ * a List<Map<MCParticle, HelixSwimmer>> to the event header.
+ *
+ * Optional (to be added later): Add DecisionMaker<MCParticle> filters to
+ * select out specific particles (e.g. those with enough PT to reach the ECAL).
+ *
+ * Based on code by Steve McGill; adapted by Mat Charles.
+ */
+
+public class MakeHelixSwimmersFromTruth extends Driver 
+{
+    /**
+     * Constructor.
+     * @param nameOfHelixList Name to store the output under in the EventHeader
+     */
+    public MakeHelixSwimmersFromTruth(String nameOfHelixList) {
+	m_nameOfHelixList = nameOfHelixList;
+    }
+
+    /**
+     * For each MCParticle in the event, create a HelixSwimmer.
+     * Store a map from MCParticle to HelixSwimmer in the event
+     * header using the special utility class MapMCToHelix.
+     */
+    public void process(EventHeader event) {
+	
+	// Get the field strength using Steve McGill's code:
+	Detector det = event.getDetector();
+	double[] zero = {0, 0, 0};
+	double[] fieldStrength = det.getFieldMap().getField(zero);
+
+	int countExtrapolatedParticles = 0;
+	int countExtrapolatedParticlesPassingSteveCuts = 0;
+	MapMCToHelix outputMap = new MapMCToHelix();
+	List<MCParticle> mcps = event.getMCParticles();
+	for (MCParticle mcp : mcps) {
+	    if (mcp.getGeneratorStatus() != MCParticle.FINAL_STATE) {
+		//  Not a final-state particle => ignore.
+		continue;
+	    }
+	    int iq = (int)mcp.getCharge();
+	    if (iq == 0) {
+		// Neutral => ignore.
+		continue;
+	    }
+	    Hep3Vector origin = mcp.getOrigin();
+	    Hep3Vector momentum = mcp.getMomentum(); // Momentum at origin
+	    HelixSwimmer swimmer = new HelixSwimmer(fieldStrength[2]);
+	    swimmer.setTrack(momentum, origin, iq);
+	    outputMap.put(mcp, swimmer);
+	    // Checks:
+	    countExtrapolatedParticles++;
+	    double cosTheta = momentum.z()/momentum.magnitude();
+	    double pt = Math.sqrt(momentum.x()*momentum.x() + momentum.y()*momentum.y());
+	    if (Math.abs(cosTheta)<0.8 && pt>0.9525) {
+		countExtrapolatedParticlesPassingSteveCuts++;
+	    }
+	}
+	// Wrap the map:
+	List<MapMCToHelix> outputList = new Vector<MapMCToHelix>();
+	outputList.add(outputMap);
+	event.put(m_nameOfHelixList, outputList);
+
+	System.out.println(this.getClass().getName()+": Extrapolated "+countExtrapolatedParticles+" MC particles, of which "+countExtrapolatedParticlesPassingSteveCuts+" should hit barrel ECAL.");
+    }
+    
+    protected String m_nameOfHelixList;
+}
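
A minimal consumer sketch (not part of the commit), showing how a later driver might read the map back. The collection name is a placeholder, and MapMCToHelix is assumed to be a HashMap<MCParticle, HelixSwimmer> helper, as its use in process() above suggests.

package structural;

import java.util.List;

import org.lcsim.event.EventHeader;
import org.lcsim.event.MCParticle;
import org.lcsim.util.Driver;
import org.lcsim.util.swim.HelixSwimmer;

public class ExampleHelixConsumer extends Driver
{
    public void process(EventHeader event)
    {
        List<MapMCToHelix> wrapper = event.get(MapMCToHelix.class, "TruthHelixSwimmers");
        MapMCToHelix mcToHelix = wrapper.iterator().next();
        for (MCParticle mcp : mcToHelix.keySet()) {
            HelixSwimmer swimmer = mcToHelix.get(mcp);
            // ... swim the helix out to the calorimeter surface and compare
            //     the entry point with cluster positions ...
        }
    }
}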

lcsim/src/org/lcsim/contrib/uiowa/structural
MakeSeparatedClusters.java added at 1.1
diff -N MakeSeparatedClusters.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MakeSeparatedClusters.java	16 Dec 2005 21:11:39 -0000	1.1
@@ -0,0 +1,79 @@
+package structural;
+
+import java.util.List;
+import java.util.Vector;
+
+import org.lcsim.event.EventHeader;
+import org.lcsim.util.Driver;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.CalorimeterHit;
+
+/**
+ * This class takes as input a complete list of clusters in the event,
+ * plus information on which of those clusters can be broken down.
+ * Any clusters which can be broken down are separated into a number
+ * of discrete clusters; any clusters which cannot be broken down
+ * are left unchanged. The output is a list of clusters. 
+ *
+ * When clusters can be broken down into subclusters, the union of
+ * the subclusters MUST contain each hit once and once only. Then
+ * by construction, each hit from the input list of clusters will 
+ * appear in exactly one cluster from the output list.
+ *
+ * @version $Id: MakeSeparatedClusters.java,v 1.1 2005/12/16 21:11:39 mcharles Exp $
+ */
+
+public class MakeSeparatedClusters extends Driver
+{
+
+    /**
+     * Constructor.
+     *
+     * @param inputListOfBigClusters        A complete list of clusters in the event.
+     * @param inputMapToSubClusters         For each big cluster with substructure, a list of disjoint subclusters
+     * @param outputListOfSeparatedClusters The list of clusters after breaking apart clusters with multiple subclusters
+     */
+    public MakeSeparatedClusters(String inputListOfBigClusters, String inputMapToSubClusters, String outputListOfSeparatedClusters)
+    {
+	m_inputListOfBigClusters = inputListOfBigClusters;
+	m_inputMapToSubClusters = inputMapToSubClusters;
+	m_outputListOfSeparatedClusters = outputListOfSeparatedClusters;
+    }
+
+    public void process(EventHeader event)
+    {
+	// Get the inputs:
+	List<Cluster> inputListOfBigClusters = event.get(Cluster.class, m_inputListOfBigClusters); 
+	List<MapClusterToListOfClusters> inputMapToSubClustersList = event.get(MapClusterToListOfClusters.class, m_inputMapToSubClusters);
+	MapClusterToListOfClusters inputMapToSubClusters = inputMapToSubClustersList.iterator().next();
+	// Prepare the output:
+	List<Cluster> outputListOfSeparatedClusters = new Vector<Cluster>();
+	// Loop over all clusters and deal with them:
+	for (Cluster currentCluster : inputListOfBigClusters) {
+	    List<Cluster> subClusters = inputMapToSubClusters.get(currentCluster);
+	    if (subClusters != null && subClusters.size()>0) {
+		// It has structure => need to break it apart.
+		int countHitsInSubClusters = 0;
+		for (Cluster subCluster : subClusters) {
+		    outputListOfSeparatedClusters.add(subCluster);
+		    countHitsInSubClusters += subCluster.getCalorimeterHits().size();
+		}
+		// Crosscheck: Ensure that the hit book-keeping is correct.
+		if (currentCluster.getCalorimeterHits().size() != countHitsInSubClusters) {
+		    throw new AssertionError("ERROR: Mismatch in number of hits. Full cluster has "+currentCluster.getCalorimeterHits().size()+" but sum of hits in subclusters is "+countHitsInSubClusters);
+		}
+		//System.out.println("DEBUG: "+this.getClass().getName()+": Broke a cluster with "+currentCluster.getCalorimeterHits().size()+" hits into "+subClusters.size()+" subclusters.");
+	    } else {
+		// It doesn't have structure -- just add it as-is to the output list
+		outputListOfSeparatedClusters.add(currentCluster);
+		//System.out.println("DEBUG: "+this.getClass().getName()+": Kept a luster with "+currentCluster.getCalorimeterHits().size()+" hits intact.");
+	    }
+	}
+	// Write out:
+	event.put(m_outputListOfSeparatedClusters, outputListOfSeparatedClusters);
+    }
+
+    protected String m_inputListOfBigClusters;
+    protected String m_inputMapToSubClusters;
+    protected String m_outputListOfSeparatedClusters;
+}
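
A hedged wiring sketch (not part of the commit): HaloAssigner is run before MakeSeparatedClusters so that every hit of a structured parent cluster ends up in some subcluster, which is exactly the invariant the hit-count crosscheck above enforces. All collection names are placeholders.

package structural;

import org.lcsim.util.Driver;

public class ExampleSeparationChain extends Driver
{
    public ExampleSeparationChain()
    {
        // ... drivers that create "BigClusters" and fill "ParentToSubclusterMap" go here ...
        add(new HaloAssigner("ParentToSubclusterMap"));
        add(new MakeSeparatedClusters("BigClusters",
                                      "ParentToSubclusterMap",
                                      "SeparatedClusters"));
    }
}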

lcsim/src/org/lcsim/contrib/uiowa/structural
MapClusterToExtrapolationInfo.java added at 1.1
diff -N MapClusterToExtrapolationInfo.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MapClusterToExtrapolationInfo.java	16 Dec 2005 21:11:40 -0000	1.1
@@ -0,0 +1,12 @@
+package structural;
+// Problems storing this within the event...
+
+import java.util.Map;
+import java.util.HashMap;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.Cluster;
+
+public class MapClusterToExtrapolationInfo extends HashMap<Cluster, TrackExtrapolationInfo>
+{
+
+}

lcsim/src/org/lcsim/contrib/uiowa/structural
MapClusterToListOfExtrapolationInfo.java added at 1.1
diff -N MapClusterToListOfExtrapolationInfo.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MapClusterToListOfExtrapolationInfo.java	16 Dec 2005 21:11:40 -0000	1.1
@@ -0,0 +1,12 @@
+package structural;
+// Problems storing this within the event...
+
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
+import org.lcsim.event.Cluster;
+
+public class MapClusterToListOfExtrapolationInfo extends HashMap<Cluster, List<TrackExtrapolationInfo>>
+{
+
+}
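
As a hedged sketch (not part of the commit) of why these trivial subclasses exist: giving the parameterized map a concrete class of its own lets it be stored in and fetched from the EventHeader by class and name, mirroring the List-wrapping pattern used for MapClusterToListOfClusters above. The collection name below is a placeholder.

package structural;

import java.util.List;
import java.util.Vector;

import org.lcsim.event.EventHeader;
import org.lcsim.util.Driver;

public class ExampleExtrapolationInfoStore extends Driver
{
    public void process(EventHeader event)
    {
        MapClusterToExtrapolationInfo extrapMap = new MapClusterToExtrapolationInfo();
        // ... fill extrapMap with Cluster -> TrackExtrapolationInfo entries ...

        // Wrap in a List and write out:
        List<MapClusterToExtrapolationInfo> wrapper = new Vector<MapClusterToExtrapolationInfo>();
        wrapper.add(extrapMap);
        event.put("ClusterExtrapolationInfo", wrapper);

        // A later driver reads it back by class and name:
        MapClusterToExtrapolationInfo readBack =
            event.get(MapClusterToExtrapolationInfo.class, "ClusterExtrapolationInfo").iterator().next();
    }
}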