Commit in lcsim/src/org/lcsim/contrib/CarstenHensel on MAIN
EMClusterIDAnalyzer.java  +498  added 1.1
MainLoop.java              +45  added 1.1
Total: +543 lines, 2 added files
Carsten's contrib area created

lcsim/src/org/lcsim/contrib/CarstenHensel
EMClusterIDAnalyzer.java added at 1.1
diff -N EMClusterIDAnalyzer.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ EMClusterIDAnalyzer.java	21 Aug 2005 21:28:19 -0000	1.1
@@ -0,0 +1,498 @@
+/*
+ * EMClusterIDAnalyzer.java
+ *
+ * Created on August 21, 2005, 4:16 PM
+ *
+ * To change this template, choose Tools | Options and locate the template under
+ * the Source Creation and Management node. Right-click the template and choose
+ * Open. You can then make changes to the template in the Source Editor.
+ */
+
+package org.lcsim.contrib.CarstenHensel;
+
+/**
+ *
+ * @author carsten
+ */
+
+import java.util.*;
+import org.lcsim.event.*;
+import org.lcsim.event.util.CalorimeterHitEsort;
+import org.lcsim.geometry.CalorimeterIDDecoder;
+import org.lcsim.spacegeom.PrincipalAxesLineFitter;
+import org.lcsim.recon.cluster.nn.NearestNeighborClusterer;
+import org.lcsim.recon.cluster.fixedcone.FixedConeClusterer;
+import org.lcsim.recon.cluster.util.CalorimeterCluster;
+import org.lcsim.recon.cluster.util.BasicCluster;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+import org.lcsim.util.fourvec.*;
+import hep.aida.*;
+import hep.aida.ref.tree.TreeObjectAlreadyExistException;
+import java.text.*;
+import hep.physics.vec.*;
+
+/**
+ * Reconstruction: Clusters
+ *
+ * This driver applies the fixed-cone clustering algorithm at several cone
+ * opening angles (called "radius" only by convention of the SLAC developers
+ * of org.lcsim) to the data set on which it is run. The results are stored
+ * in an AIDA tree under /FixedCone/r_(radius), where (radius) runs from
+ * 0.03 to 0.09 in steps of 0.03.
+ *
+ * @author Norman Graf with modifications by Eric Benavidez ([log in to unmask])
+ */
+public class EMClusterIDAnalyzer extends Driver {
+    // This is convenient for creating and filling histograms "on-the-fly".
+    private AIDA aida = AIDA.defaultInstance();
+    private ITree tree;
+    private DecimalFormat df;
+    private static final double SAMP_FRAC = 0.012;
+
+    // Debugging flag: 0 = no printout, >0 = verbose printout to stdout.
+    private static final int debug = 0;
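+    // Histogramming level: 0 = standard histograms only, >0 also fills the
+    // diagnostic histograms (cluster counts, centroids, direction cosines).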
+    private static final int histoLevel = 0;
+    private static final double interactionRadius = 1260.0;
+
+    // Constructor: Initialize the class. This is the only constructor, and
+    // it requires no inputs.
+    //
+    public EMClusterIDAnalyzer() {
+	tree = aida.tree();
+	df = new DecimalFormat();
+    }
+
+    // Method calcVec: Calculate the four-vector for a given cluster with hits
+    // that must be decoded by the given decoder.
+    //
+    // Inputs: A hit decoder and a cluster.
+    // Output: The four-vector for the cluster.
+    //
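+    // Each hit contributes a massless four-vector of energy E directed along
+    // the (theta, phi) of its cell, so the cluster four-vector is the
+    // energy-weighted sum of the hit directions.
+    //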
+    public Momentum4Vector calcVec(CalorimeterIDDecoder decoder, BasicCluster clus) {
+	Momentum4Vector sum = new Momentum4Vector();
+	List<CalorimeterHit> hits = clus.getCalorimeterHits();
+
+	for (CalorimeterHit hit : hits) {
+	    double E = hit.getRawEnergy();
+	    decoder.setID(hit.getCellID());
+	    double phi = decoder.getPhi();
+	    double theta = decoder.getTheta();
+
+	    double px = E*Math.cos(phi)*Math.sin(theta);
+	    double py = E*Math.sin(phi)*Math.sin(theta);
+	    double pz = E*Math.cos(theta);
+
+	    sum.plusEquals(new Momentum4Vector(px, py, pz, E));
+	}
+	return sum;
+    }
+
+
+    // Method calcLayerEnergies: Calculate the layer energies for the given
+    // cluster with hits to be decoded by the given decoder.
+    //
+    // Inputs: A hit decoder and a cluster.
+    // Output: An ArrayList with elements indexed by layer number that contain
+    //         the energy in the layer indexed, i.e. returned_list[0] is the
+    //         energy in layer 0.
+    //
+    public ArrayList<Double> calcLayerEnergies(CalorimeterIDDecoder decoder, BasicCluster clus) {
+	ArrayList<Double> layerE = new ArrayList<Double>();
+	List<CalorimeterHit> hits = clus.getCalorimeterHits();
+
+	for (CalorimeterHit hit : hits) {
+	    double energy = hit.getRawEnergy();
+	    decoder.setID(hit.getCellID());
+	    int layer = decoder.getLayer();
+
+	    // Accumulate the hit energy into the entry for its layer. If the
+	    // list does not yet reach that layer, pad it with zeros up to the
+	    // layer index first.
+	    //
+	    if (layerE.size() - 1 < layer) {
+		for (int ctr = layerE.size(); ctr < layer; ctr++)
+		    layerE.add(0.0);
+		layerE.add(energy);
+	    }
+	    else
+		layerE.set(layer, layerE.get(layer) + energy);
+	}
+	return layerE;
+    }
+
+
+    // Method calcCentroidDirCosines: Calculates the centroid and direction
+    // cosines for the given cluster with hits to be decoded by the given
+    // decoder.
+    //
+    // Inputs: A hit decoder and a cluster.
+    // Output: A 2-D double array. The first (leftmost) dimension is only of
+    //         length 2. Index 0 is for the centroid, and index 1 is for the
+    //         direction cosines. The second (rightmost) dimension keeps the
+    //         x, y, z coordinates in indices 0, 1, 2, respectively for
+    //         whatever quantity the first index is indexing. (i.e.
+    //         returned_array[0][2] is the z-coordinate of the cluster
+    //         centroid)
+    //
+    public double[][] calcCentroidDirCosines(CalorimeterIDDecoder decoder, BasicCluster clus) {
+	double[][] ret = new double[2][3];
+	List<CalorimeterHit> hits = clus.getCalorimeterHits();
+	double[][] points = new double[3][hits.size()];
+	int npoints = 0;
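+	// points is laid out as points[coordinate][hitIndex] (rows x, y, z,
+	// one column per hit), which is the layout handed to the line fitter.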
+
+	for (CalorimeterHit hit : hits) {
+	    decoder.setID(hit.getCellID());
+	    points[0][npoints] = decoder.getX();
+	    points[1][npoints] = decoder.getY();
+	    points[2][npoints] = decoder.getZ();
+	    npoints++;
+	}
+
+	PrincipalAxesLineFitter lf = new PrincipalAxesLineFitter();
+	lf.fit(points);
+	ret[0] = lf.centroid();
+	ret[1] = lf.dircos();
+
+	return ret;
+    }
+
+    // Method clusterEnergy: Calculate the energy as the sum of the energies
+    // of the hits in clus. This is necessary at the moment since it seems
+    // that clus.getEnergy() doesn't give the sum of the energies of the hits
+    // in clus.
+    //
+    public double clusterEnergy(BasicCluster clus) {
+	List<CalorimeterHit> hits = clus.getCalorimeterHits();
+	double sum = 0.;
+	for (CalorimeterHit hit : hits) {
+	    sum += hit.getRawEnergy();
+	}
+	return sum;
+    }
+
+    // Efficiency test: Check whether the clustering algorithm was
+    // "efficient" for the cluster it found. Efficiency is defined as
+    //   E_cluster > E_max - K*eRes(E)
+    // where eRes(E) = 0.2*sqrt(E) is the energy resolution of the
+    // detector as a function of energy, and K runs over
+    // {0.1, 0.25, 0.5, 1.0, 2.0, 3.0}.
+    //
+    // Fill a histogram with 1 if this cluster is efficient,
+    // 0 if not.
+    //
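+    // For illustration (numbers not from the original code): with
+    // eInit = 5 GeV and K = 0.5, eRes = 0.2*sqrt(5) ~ 0.45 GeV, so the
+    // threshold is eMax - 0.5*0.45*SAMP_FRAC ~ eMax - 0.0027; the factor
+    // SAMP_FRAC puts the resolution on the same raw (sampled) energy scale
+    // as eMax and eMostEnergetic.
+    //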
+    public void testEfficiency(double eInit, double eMostEnergetic, double eMax) {
+	double[] K = {0.10,0.25,0.5,1.0,2.0,3.0};
+	StringBuffer buf = new StringBuffer();
+
+	for (double k : K) {
+	    buf.delete(0, buf.length());
+	    buf.insert(0, "k_");
+	    df.setMaximumFractionDigits(2);
+	    df.setMinimumFractionDigits(2);
+	    df.format(k, buf, new FieldPosition(df.INTEGER_FIELD));
+	    String kStr = buf.toString();
+
+	    IHistogram1D effHist = aida.histogram1D("Efficiency " + kStr,2,-.5,1.5);
+	    ICloud1D diffTest = aida.cloud1D("Efficiency test difference " + kStr);
+	    ICloud2D effMin = aida.cloud2D("Efficiency test min vs eMostEnergetic " + kStr);
+
+	    double effMinVal = eMax - k*0.2*Math.sqrt(eInit)*SAMP_FRAC;
+
+	    diffTest.fill(eMostEnergetic-effMinVal);
+	    effMin.fill(eMostEnergetic, effMinVal);
+
+	    if (eMostEnergetic > effMinVal) {
+		effHist.fill(1.0);
+		if (this.debug > 0)
+		    System.out.println("Filled efficiency with 1");
+	    }
+	    else {
+		effHist.fill(0.0);
+		if (this.debug > 0)
+		    System.out.println("Filled efficiency with 0");
+	    }
+	}
+    }
+
+    // Method calcMass: With the given raw energy and raw momentum vector,
+    // calculate the raw mass and its square.
+    //
+    // Inputs: The raw energy and the raw momentum 4-vector.
+    // Output: An array of 2 elements: 0 - mass squared, 1 - mass (>= 0)
+    //
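+    // Note: m-squared values between -1E-10 and 0 are quietly treated as
+    // zero mass (presumably floating-point rounding for light-like
+    // clusters); anything more negative is flagged on stdout.
+    //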
+    public double[] calcMass(double eRaw, Momentum4Vector pRaw) {
+	double[] mArray = new double[2];
+	mArray[0] = eRaw*eRaw - pRaw.vec3dot(pRaw);
+	if (mArray[0] < 0) {
+	    mArray[1] = 0.;
+	    if (mArray[0] < -1E-10) {
+		System.out.println("Massively negative m2Raw = " + mArray[0]);
+		System.out.println("eRaw = " + eRaw + ", p2Raw = " + pRaw.vec3dot(pRaw));
+	    }
+	}
+	else
+	    mArray[1] = Math.sqrt(mArray[0]);
+
+	return mArray;
+    }
+
+    // Method mkdirIgnoreExist: Makes a directory "dirname" in the
+    // ITree "tree", discarding the exception that arises if the
+    // directory already exists, then changes into "dirname". If the
+    // directory cannot be entered, the resulting runtime exception
+    // propagates.
+    //
+    public void mkdirIgnoreExist(ITree tree, String dirname) {
+	try {
+	    tree.mkdir(dirname);
+	}
+	catch (IllegalArgumentException e) {
+	    if (! (e instanceof TreeObjectAlreadyExistException)) 
+		throw e;
+	}
+	tree.cd(dirname);
+    }
+
+    // Method process: Processes the events. This is where the events are fed
+    // when JAS3 feeds events to the program.
+    //
+    public void process(EventHeader event) {
+        super.process(event);
+	// Get MC Particle information - namely the energy of the final state
+	// particle. It looks as if for theta=90 we can get the initial state
+	// energy fairly accurately from the final state energy.
+
+	List<MCParticle> mcParticles = event.getMCParticles();
+	double eInit = 0.;
+	boolean neutron = false;
+
+	for (MCParticle mcParticle : mcParticles) {
+
+            if (mcParticle.getGeneratorStatus() == MCParticle.FINAL_STATE) {
+		aida.cloud1D("Radius of interaction").fill(mcParticle.getEndPoint().magnitude());
+ 
+		// Reject photons whose end point lies at a radius smaller
+		// than interactionRadius (1260), i.e. photons that apparently
+		// interact before reaching the calorimeter. Here "radius" is
+		// a real distance, not a cone opening angle.
+		//
+		if (mcParticle.getType().getName().equals("gamma")
+		    && mcParticle.getEndPoint().magnitude() < this.interactionRadius) {
+		    aida.cloud1D("Rejected Particles").fill(0);
+ 
+		    if (this.debug > 0)
+			System.out.println("Rejected event " + event.getEventNumber());
+
+		    return;
+		}
+
+		eInit = mcParticle.getEnergy();
+		aida.cloud1D("eInit").fill(eInit);
+		if (mcParticle.getType().getName().equals("n"))
+		    neutron = true;	       
+	    }
+	}
+
+        List<CalorimeterHit> ecalBarrHits = event.get(CalorimeterHit.class,"EcalBarrHits");
+        List<CalorimeterHit> endcapHits = event.get(CalorimeterHit.class,"EcalEndcapHits");
+        CalorimeterIDDecoder _decoder = (CalorimeterIDDecoder) event.getMetaData(ecalBarrHits).getIDDecoder();
+
+        // Create a map of barrel hits keyed on their cell ID. (The map is
+        // filled below but not read back anywhere in this version.)
+        Map<Long, CalorimeterHit> hitmap = new HashMap<Long, CalorimeterHit>();
+
+	double sumBarrHits = 0.;
+	double sumEndcapHits = 0.; // NOTE: Only nonzero for neutrons.
+
+        for (CalorimeterHit hit : ecalBarrHits) {
+            long id = hit.getCellID();
+            double cell_energy = hit.getRawEnergy();
+	    sumBarrHits += cell_energy;
+	    aida.cloud1D("cell energy (cloud1D)").fill(cell_energy);
+	    aida.histogram1D("cell energy (histogram1D)",100,0.0,0.001).fill(cell_energy);
+            hitmap.put(id, hit);
+            _decoder.setID(id);
+        }
+
+	// Note: eMax is either
+	//  a) 5 GeV if the particle is a neutron (useful only for 20 GeV n),
+	//     or
+	//  b) The sum of the deposited energy in the ECAL if the particle is
+	//     not a neutron.
+	//
+	// This is for the efficiency test later on.
+	//
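+	// (In raw units the neutron case is 5 GeV * SAMP_FRAC = 0.06, so eMax
+	// is on the same raw, sampled-energy scale as the cluster energies it
+	// is later compared against.)
+	//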
+	double eMax;
+	if (neutron) {
+	    eMax = 5*SAMP_FRAC;
+	    aida.cloud1D("neutron").fill(1);
+	}
+	else {
+	    for (CalorimeterHit hit : endcapHits) {
+		sumEndcapHits += hit.getRawEnergy();
+	    }
+	    eMax = sumBarrHits + sumEndcapHits;
+	    aida.cloud1D("neutron").fill(0);
+	}
+	aida.cloud1D("eMax").fill(eMax);
+
+	if (this.debug > 0)
+	    System.out.println("eInit = " + eInit + ", eMax = " + eMax);
+
+	// Organize an AIDA tree with the fixed cone clustering algorithm
+	// as a top-level directory
+
+	mkdirIgnoreExist(tree, "/FixedCone");
+
+	// Iterate over several values of the radius (i.e. cone opening angle)
+	// NB: "radius" really is cone opening angle but as mentioned before,
+	// this term is used because of the SLAC convention
+
+	StringBuffer buf = new StringBuffer(); 
+
+	for (double radius = 0.03; radius <= 0.09; radius += 0.03) {
+	    // Create new AIDA directory for the radius.
+	    // Ensure that radius directory has two figures after decimal pt.
+
+	    buf.delete(0, buf.length());
+	    buf.insert(0, "/FixedCone/r_");
+	    df.setMaximumFractionDigits(2);
+	    df.setMinimumFractionDigits(2);
+	    df.format(radius, buf, new FieldPosition(df.INTEGER_FIELD));
+	    String radstr = buf.toString();
+	    mkdirIgnoreExist(tree, radstr);
+
+	    // Zero threshold on the seed energy or the cluster energy,
+	    // respectively.
+	    //
+	    double seed = 0.;
+	    double minE = 0.;
+
+	    FixedConeClusterer fcc = new FixedConeClusterer(radius, seed, minE);
+	    List<BasicCluster> clusters = fcc.makeClusters(ecalBarrHits, _decoder);
+
+	    // Get all the clusters and cluster sets to prepare to loop over
+	    // them.
+
+	    List<List<Cluster>> clusterSets = event.get(Cluster.class);
+            if (this.histoLevel > 0) {
+                aida.cloud1D("clusterSets").fill(clusterSets.size());
+                aida.cloud1D("clusters").fill(clusters.size());
+            }
+
+	    // Accumulators for a raw (uncorrected for sampling fraction)
+	    // momentum 4-vector sum, a raw energy sum, and a corrected (for
+	    // sampling fraction) energy sum
+
+	    Momentum4Vector sumVecRaw = new Momentum4Vector();
+	    double sumERaw = 0.;
+	    double sumECorr = 0.;
+
+	    int ctr = 0;
+
+	    double eMostEnergetic = 0.; // Energy for most energetic cluster.
+                                        // Used for efficiency calculation.
+
+	    // Loop through all clusters
+	    for (BasicCluster cluster : clusters) {
+		// There appears to be a bug in that the cluster energy is less
+		// than the sum of the energy of each hit in the cluster. So
+		// calculate the cluster energy as the sum of the energy of
+		// each hit in the cluster.
+
+		double eRaw = clusterEnergy(cluster);
+		double eCorr = eRaw / SAMP_FRAC; // corrected energy
+		sumERaw += eRaw;
+		sumECorr += eCorr;
+
+		// This, along with eMostEnergetic = 0. (since we expect
+		// nothing less than 0) will give us the energy of the most
+		// energetic cluster when this loop is done
+		//
+		if (eRaw > eMostEnergetic)
+		    eMostEnergetic = eRaw;
+
+		aida.cloud1D("eRaw over eMax").fill(eRaw/eMax);
+
+		int ncells = cluster.getCalorimeterHits().size();
+
+		// Calculate, then accumulate, raw vector.
+		Momentum4Vector vecRaw = calcVec(_decoder, cluster);
+		sumVecRaw.plusEquals(vecRaw);
+
+		// Calculate the raw mass and m-squared for this cluster from
+		// its raw energy and momentum vector.
+		double[] mass = calcMass(eRaw, vecRaw);
+
+		aida.cloud1D("Cluster raw mass").fill(mass[1]);
+		aida.cloud1D("Cluster raw m-squared").fill(mass[0]);
+
+		double mCorr = mass[1] / SAMP_FRAC;
+		aida.cloud1D("Cluster corrected mass").fill(mCorr);
+
+		aida.cloud1D("Number of cells in cluster").fill(ncells);
+		aida.histogram1D("Number of cells in cluster (histogram1D)",50,-0.5,49.5).fill(ncells);
+		aida.cloud2D("Raw Energy vs ncells").fill(ncells,eRaw);
+		aida.cloud1D("Cluster raw energy").fill(eRaw);
+		aida.cloud1D("Cluster corrected energy").fill(eCorr);
+
+		if(ncells>10) {
+		    ctr++;
+		    aida.cloud1D("eRaw over eMax ncells>10").fill(eRaw/eMax);
+		    aida.cloud1D("Cluster raw energy when ncell > 10 ").fill(eRaw);
+
+		    ArrayList<Double> layerE = calcLayerEnergies(_decoder, cluster);
+		    int j = 0;
+		    for (double lEnergy : layerE) {
+			double eFrac = lEnergy / eRaw;
+			aida.cloud2D("Raw Fractional Energy vs Layer").fill(j++,eFrac);
+		    }
+
+                    if (this.histoLevel > 0) {
+                        double[][] centroidDirCosines = calcCentroidDirCosines(_decoder, cluster);
+                        double[] pos = centroidDirCosines[0];
+                        aida.cloud2D("centroid x vs y").fill(pos[0], pos[1]);
+                        aida.cloud1D("centroid radius").fill( Math.sqrt(pos[0]*pos[0] + pos[1]*pos[1]) );
+                        
+                        double[] dircos = centroidDirCosines[1];
+                        for(int i=0; i< dircos.length; ++i) {
+                            aida.cloud1D("dircos "+i).fill(dircos[i]);
+                        }
+                    }
+		}
+	    }
+
+	    // Test the efficiency of the clusterer.
+	    this.testEfficiency(eInit, eMostEnergetic, eMax);
+
+	    aida.cloud1D("Number of clusters with ncell>10").fill(ctr);
+	    if ((debug > 0) && (ctr >= 2)) {
+		System.out.println("Ctr = " + ctr + ", Event = " + event.getEventNumber());
+	    }
+
+	    // Find reconstructed mass, both raw and corrected (in terms of
+	    // samp. frac.) and put them into a cloud.
+	    //
+	    double[] sumMass = calcMass(sumERaw, sumVecRaw);
+	    double sumMCorr = sumMass[1] / SAMP_FRAC;
+
+	    aida.cloud1D("Raw sum of energy").fill(sumERaw);
+	    aida.cloud1D("Raw sum of mass").fill(sumMass[1]);
+	    aida.cloud1D("Raw sum of m-squared").fill(sumMass[0]);
+
+	    aida.cloud1D("Corrected sum of energy").fill(sumECorr);
+	    aida.cloud1D("Corrected sum of mass").fill(sumMCorr);
+	    aida.histogram1D("Corrected sum of mass HISTOGRAM", 50, 0, 0.4).fill(sumMCorr);
+
+	    tree.cd("/");
+	}
+    }
+
+    protected void endOfData() {
+        aida.cloud1D("test").fill(-999);
+        System.out.println("do some clean up");
+    }
+}
+
+

lcsim/src/org/lcsim/contrib/CarstenHensel
MainLoop.java added at 1.1
diff -N MainLoop.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ MainLoop.java	21 Aug 2005 21:28:19 -0000	1.1
@@ -0,0 +1,45 @@
+/*
+ * MainLoop.java
+ *
+ * Created on August 21, 2005, 3:59 PM
+ *
+ * Java wrapper to enable running outside of JAS3
+ * 16-JUL-2005 Jan Strube
+ * from a response to the JAS mailing list by Tony Johnson
+ */
+
+package org.lcsim.contrib.CarstenHensel;
+
+/**
+ *
+ * @author carsten
+ */
+
+import java.io.File;
+import org.lcsim.mc.fast.MCFast;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+import org.lcsim.util.loop.LCIODriver;
+import org.lcsim.util.loop.LCSimLoop;
+
+
+public class MainLoop extends Driver{
+    
+    /** Creates a new instance of MainLoop */
+    public MainLoop() {
+    }
+    
+    
+    public static void main(String[] args) throws Exception{
+      LCSimLoop loop = new LCSimLoop();
+      File input = new File("/home/carsten/LC/DataSamples/gamma_Theta90_5GeV_SLIC_v1r9p3_cdcaug05.slcio");
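+      // NOTE: the input file path is hard-coded for this particular sample;
+      // edit it (or pass it in via args) to run over a different LCIO file.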
+      loop.setLCIORecordSource(input);
+      loop.add(new EMClusterIDAnalyzer());
+//      File output = new File("exampleAnalysisJava.slcio");
+//      loop.add(new LCIODriver(output));
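+      // Process every event in the input (-1 means no event limit).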
+      loop.loop(-1);
+      loop.dispose();
+      AIDA.defaultInstance().saveAs("exampleAnalysisJava.aida");
+   }
+}
+
CVSspam 0.2.8