Commit in lcsim/src/org/lcsim/recon/pfa/structural on MAIN
ReclusterDTreeDriver.java  +107 -46  1.6 -> 1.7
MJC: Propagate a few bugfixes in PFA output to stable PFA

lcsim/src/org/lcsim/recon/pfa/structural
ReclusterDTreeDriver.java 1.6 -> 1.7
diff -u -r1.6 -r1.7
--- ReclusterDTreeDriver.java	13 Jul 2008 23:33:48 -0000	1.6
+++ ReclusterDTreeDriver.java	17 Jul 2008 04:33:02 -0000	1.7
@@ -35,7 +35,7 @@
   * in this package, which uses the implementation in
   * org.lcsim.recon.cluster.directedtree developed by NIU).
   *
-  * @version $Id: ReclusterDTreeDriver.java,v 1.6 2008/07/13 23:33:48 mcharles Exp $
+  * @version $Id: ReclusterDTreeDriver.java,v 1.7 2008/07/17 04:33:02 mcharles Exp $
   * @author Mat Charles <[log in to unmask]>
   */
 
@@ -83,7 +83,7 @@
     protected boolean m_fixJetsWithCone = true;
 
     public ReclusterDTreeDriver(String dTreeClusterList, String trackList, String mcList) {
-	System.out.println("ReclusterDTreeDriver version 0.3");
+	System.out.println("ReclusterDTreeDriver version 0.31");
 	initTrackMatch();
 	initCalibration();
 	initPlots();
@@ -1051,7 +1051,13 @@
 	// Calculate kinematic properties
 	double clusterEnergy = energy(clus, allSharedClusters, m_neutralCalib); // CHECK: Is this OK?
 	double clusterMomentumMagSq = clusterEnergy*clusterEnergy - mass_K0*mass_K0;
-	if (clusterMomentumMagSq<0.0) { clusterMomentumMagSq = 0.0; }
+	boolean treatAsZeroMass = false;
+	if (clusterMomentumMagSq<0.0) { 
+	    // Pathological case of E<M.
+	    // Revert to a zero mass particle here.
+	    clusterMomentumMagSq = 0.0; 
+	    treatAsZeroMass = true;
+	}
 	Hep3Vector pos = new BasicHep3Vector(clus.getPosition());
 	Hep3Vector threeMomentum = VecOp.mult(Math.sqrt(clusterMomentumMagSq), VecOp.unit(pos)); // assume it comes from the IP
 	HepLorentzVector fourMomentum = new BasicHepLorentzVector(clusterEnergy, threeMomentum);
@@ -1061,7 +1067,11 @@
 	// Set the PID and mass
 	part.addParticleID(pid_K0);
 	part.setParticleIdUsed(pid_K0);
-	part.setMass(mass_K0);
+	if (treatAsZeroMass) {
+	    part.setMass(0.0);
+	} else {
+	    part.setMass(mass_K0);
+	}
 	// All done
 	return part;
     }
@@ -2251,69 +2261,72 @@
 	// FIXME: Break up MultipleTrackTrack tracks.
 	for (Track tr : tracksSortedByMomentum) {
 	    boolean isElectron = electronTracks.contains(tr); // includes positrons
-	    BaseReconstructedParticle part = new BaseReconstructedParticle();
 	    // WARNING: Clusters may be entirely wrong for jets!
+	    List<Cluster> clustersOfTrack = new Vector<Cluster>();
 	    Set<Cluster> showerComponents = newMapTrackToShowerComponents.get(tr);
-	    for (Cluster clus : showerComponents) { part.addCluster(clus); }
+	    for (Cluster clus : showerComponents) { clustersOfTrack.add(clus); }
 	    Cluster sharedHitClus = makeClusterOfSharedHits(showerComponents, allSharedClusters);
-	    if (sharedHitClus.getCalorimeterHits().size()>0) { part.addCluster(sharedHitClus); }
+	    if (sharedHitClus.getCalorimeterHits().size()>0) { clustersOfTrack.add(sharedHitClus); }
+	    // Make ReconstructedParticle. May be more than one if MultipleTrackTrack.
+	    List<BaseReconstructedParticle> particles = new Vector<BaseReconstructedParticle>();
 	    if (tr instanceof MultipleTrackTrack) {
 		for (Track subtrack : tr.getTracks()) {
+		    BaseReconstructedParticle part = new BaseReconstructedParticle();
 		    part.addTrack(subtrack);
+		    particles.add(part);
 		}
 	    } else {
+		BaseReconstructedParticle part = new BaseReconstructedParticle();
 		part.addTrack(tr);
+		particles.add(part);
 	    }
-	    part.setCharge(tr.getCharge());
-	    Hep3Vector trackMomentum = momentum(tr);
-	    double trackMomentumMag = trackMomentum.magnitude();
-	    double currentParticleMass = mass_piplus;
-	    if (isElectron) {
-		currentParticleMass = mass_electron;
-	    }
-	    double trackEnergySq = trackMomentumMag*trackMomentumMag + currentParticleMass*currentParticleMass;
-	    HepLorentzVector fourMomentum = new BasicHepLorentzVector(Math.sqrt(trackEnergySq), trackMomentum);
-	    if (tr instanceof MultipleTrackTrack) {
-		double sumEnergy = 0.0;
-		Hep3Vector sumThreeMomentum = new BasicHep3Vector(0,0,0);
-		for (Track subtr : tr.getTracks()) {
-		    Hep3Vector subTrackMomentum = new BasicHep3Vector(subtr.getMomentum());
-		    double subTrackMomentumMag = subTrackMomentum.magnitude();
-		    double subTrackEnergySq = subTrackMomentumMag*subTrackMomentumMag + mass_piplus*mass_piplus;
-		    sumEnergy += Math.sqrt(subTrackEnergySq);
-		    sumThreeMomentum = VecOp.add(sumThreeMomentum, subTrackMomentum);
-		}
-		HepLorentzVector sumFourMomentum = new BasicHepLorentzVector(sumEnergy, sumThreeMomentum);
-		part.set4Vector(sumFourMomentum);
-		part.setMass(sumFourMomentum.magnitude());
-	    } else {
+	    if (particles.size() < 1) { throw new AssertionError("Book-keeping error"); }
+	    // Assign clusters to first particle in list
+	    for (Cluster clus : clustersOfTrack) {
+		particles.get(0).addCluster(clus);
+	    }
+	    // Make particles properly
+	    for (BaseReconstructedParticle part : particles) {
+		if (part.getTracks().size() != 1) { throw new AssertionError("Book-keeping error"); }
+		Track trackOfThisParticle = part.getTracks().get(0);
+		part.setCharge(trackOfThisParticle.getCharge());
+		Hep3Vector trackMomentum = momentum(trackOfThisParticle);
+		double trackMomentumMag = trackMomentum.magnitude();
+		double currentParticleMass = mass_piplus;
+		if (isElectron) {
+		    currentParticleMass = mass_electron;
+		}
+		double trackEnergySq = trackMomentumMag*trackMomentumMag + currentParticleMass*currentParticleMass;
+		HepLorentzVector fourMomentum = new BasicHepLorentzVector(Math.sqrt(trackEnergySq), trackMomentum);
 		part.set4Vector(fourMomentum);
 		part.setMass(mass_piplus);
-	    }
-	    part.setReferencePoint(new BasicHep3Vector(tr.getReferencePoint()));
-	    if (isElectron) {
-		part.addParticleID(pid_electron);
-		part.setParticleIdUsed(pid_electron);
-	    } else {
-		part.addParticleID(pid_piplus);
-		part.setParticleIdUsed(pid_piplus);
+		part.setReferencePoint(new BasicHep3Vector(trackOfThisParticle.getReferencePoint()));
+		if (isElectron) {
+		    part.addParticleID(pid_electron);
+		    part.setParticleIdUsed(pid_electron);
+		} else {
+		    part.addParticleID(pid_piplus);
+		    part.setParticleIdUsed(pid_piplus);
+		}
 	    }
 	    // Write out as charged track
 	    if (trackToTreatAsChargedParticles.contains(tr)) {
-		outputParticleList.add(part);
-		outputChargedParticleList.add(part);
+		outputParticleList.addAll(particles);
+		outputChargedParticleList.addAll(particles);
 	    }
 	    if (trackToTreatAsChargedParticlesWithEoverPveto.contains(tr)) {
-		outputChargedParticleListWithEoverPveto.add(part);
-		outputChargedParticleListWithEoverPveto_pass.add(part);
+		outputChargedParticleListWithEoverPveto.addAll(particles);
+		outputChargedParticleListWithEoverPveto_pass.addAll(particles);
 	    }
 	    // Confusion matrix accounting of calorimeter hits is straightforward
 	    // for non-jet tracks. For tracks in jets, need to be clever to make sure
 	    // each hit appears once and once only.
+	    // FIXME: This is now screwed up for MultipleTrackTracks.
+/*
 	    Set<Track> jet = mapTrackToJet.get(tr);
 	    boolean trackInJet = m_clusterAsJets && jet != null;
 	    if (!trackInJet) {
-		outputParticleListForConfusionMatrix_singleTracks.add(part);
+		outputParticleListForConfusionMatrix_singleTracks.addAll(particles);
 		// Make copy with cluster energy...
 		BaseReconstructedParticle part2 = new BaseReconstructedParticle();
 		BaseReconstructedParticle part3 = new BaseReconstructedParticle();
@@ -2419,6 +2432,7 @@
 		    outputParticleListForConfusionMatrix_jetTracksWithNoClusters.add(partForMatrix);
 		}
 	    }
+*/
 	}
 
 	if (chargedShowersToTreatAsNeutral.size()!=0) {
@@ -2535,6 +2549,52 @@
 	    }
 	}
 
+	// There may be some "neutral hadron cluster cores" that are too small to be
+	// real -- their measured energy is below their mass. They will need special treatment.
+	Set<Cluster> lowEnergyNeutralShowers = new HashSet<Cluster>();
+	for (Cluster clus : neutralClusterCores) {
+	    double clusterEnergy = energy(clus, allSharedClusters, m_neutralCalib);
+	    if (clusterEnergy <= mass_K0) {
+		lowEnergyNeutralShowers.add(clus);
+	    }
+	}
+	// If they are entirely in the ECAL then assume they are photons.
+	for (Cluster clus : lowEnergyNeutralShowers) {
+	    List<CalorimeterHit> hits = clus.getCalorimeterHits();
+	    ListFilter filter = new ListFilter(new HitInECALDecision());
+	    List<CalorimeterHit> hitsInEcal = filter.filterList(hits);
+	    boolean allHitsInEcal = (hitsInEcal.size() == hits.size());
+	    if (allHitsInEcal) {
+		extraClustersToTreatAsPhotons.add(clus);
+	    } else {
+		// Not entirely in ECAL. For now, just add it to whatever non-tiny neutral
+		// cluster is nearest. (Not ideal, but not crazy.)
+		Cluster nearestCluster = null;
+		double nearestDistance = 0;
+		for (Cluster otherClus : neutralClusterCores) {
+		    if (lowEnergyNeutralShowers.contains(otherClus)) {
+			// Don't attach to another teeny shower
+			continue;
+		    } else {
+			double dist = proximity(clus, otherClus);
+			if (nearestCluster==null || dist<nearestDistance) {
+			    nearestCluster = otherClus;
+			    nearestDistance = dist;
+			}
+		    }
+		}
+		if (nearestCluster != null) {
+		    // Migrate to the other cluster
+		    ((BasicCluster)(nearestCluster)).addCluster(clus);
+		    neutralClusterCores.remove(clus);
+		} else {
+		    // Couldn't attach it to anything
+		    double clusterEnergy = energy(clus, allSharedClusters, m_neutralCalib);
+		    System.out.println("WARNING: Unable to attach teeny neutral hadron cluster with E="+clusterEnergy+" to a larger shower. It will have unphysical energy.");
+		}
+	    }
+	}
+
 	// Photons (...)
 	List<Cluster> photonsToUse = new Vector<Cluster>(modifiedPhotonClusters);
 	photonsToUse.addAll(extraClustersToTreatAsPhotons);
@@ -2567,7 +2627,7 @@
 	}
 
 	// Make reconstructed particles from the neutral hadron showers:
-
+	List<Cluster> neutralClusterCoresUsed = new Vector<Cluster>();
 	for (Cluster clus : neutralClusterCores) {
 	    if (extraClustersToTreatAsPhotons.contains(clus)) {
 		// Skip this (already used as a photon)
@@ -2575,6 +2635,7 @@
 	    }
 	    // write out neutral hadron
 	    BaseReconstructedParticle part = makeNeutralHadron(clus, allSharedClusters);
+	    neutralClusterCoresUsed.add(clus);
 	    // Add to the output list
 	    outputParticleList.add(part);
 	    outputNeutralHadronParticleList.add(part);
@@ -2638,8 +2699,8 @@
 	// Start by recording all used cluster cores...
 	List<Cluster> allUsedCoresList = new Vector<Cluster>();
 	Set<Cluster> allUsedCores = new HashSet<Cluster>();
-	allUsedCores    .addAll(neutralClusterCores);
-	allUsedCoresList.addAll(neutralClusterCores);
+	allUsedCores    .addAll(neutralClusterCoresUsed);
+	allUsedCoresList.addAll(neutralClusterCoresUsed);
 	if (allUsedCores.size() != allUsedCoresList.size()) { throw new AssertionError("Mis-counting of clusters: "+allUsedCores.size()+" vs "+allUsedCoresList.size()); }
 	allUsedCores    .addAll(photonsUsed);
 	allUsedCoresList.addAll(photonsUsed);
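
[For readers of this diff: the central fix in this revision guards the neutral-hadron four-vector against the pathological case where a cluster's measured energy falls below the K0 mass, clamping |p|^2 to zero and labelling the candidate massless. The following standalone Java sketch mirrors that guard in isolation; the class name, method name, and mass constant are illustrative and are not part of ReclusterDTreeDriver.]

    // Standalone sketch (illustrative only, not part of ReclusterDTreeDriver):
    // kinematics for a neutral-hadron candidate with the E < m guard added
    // in this revision. Clusters with measured energy below the K0 mass are
    // clamped to |p|^2 = 0 and labelled massless instead of yielding an
    // imaginary momentum magnitude.
    public class NeutralHadronKinematicsSketch {

        static final double MASS_K0 = 0.4976; // GeV; illustrative K0 mass constant

        /** Returns {energy, |p|, assigned mass} for a cluster of the given energy. */
        static double[] kinematics(double clusterEnergy) {
            double momentumMagSq = clusterEnergy * clusterEnergy - MASS_K0 * MASS_K0;
            double assignedMass = MASS_K0;
            if (momentumMagSq < 0.0) {
                // Pathological case of E < m: revert to a zero-mass particle,
                // as in the guard added in this revision.
                momentumMagSq = 0.0;
                assignedMass = 0.0;
            }
            return new double[] { clusterEnergy, Math.sqrt(momentumMagSq), assignedMass };
        }

        public static void main(String[] args) {
            for (double e : new double[] { 2.0, 0.3 }) {
                double[] k = kinematics(e);
                System.out.printf("E = %.3f GeV -> |p| = %.3f GeV, m = %.3f GeV%n",
                                  k[0], k[1], k[2]);
            }
        }
    }
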
CVSspam 0.2.8