lcsim/src/org/lcsim/contrib/CarstenHensel
diff -u -r1.2 -r1.3
--- EMClusterIDAnalyzer.java 22 Aug 2005 03:58:10 -0000 1.2
+++ EMClusterIDAnalyzer.java 24 Aug 2005 15:11:51 -0000 1.3
@@ -9,47 +9,29 @@
*/
package org.lcsim.contrib.CarstenHensel;
-
-/**
- *
- * @author carsten
- */
-
-import java.util.*;
import org.lcsim.event.*;
-import org.lcsim.geometry.CalorimeterIDDecoder;
-import org.lcsim.spacegeom.PrincipalAxesLineFitter;
-import org.lcsim.recon.cluster.fixedcone.FixedConeClusterer;
-import org.lcsim.recon.cluster.util.BasicCluster;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
-import org.lcsim.util.fourvec.*;
import hep.aida.*;
import hep.aida.ICloud2D;
-import hep.aida.ref.tree.TreeObjectAlreadyExistException;
import java.text.*;
/**
- * Reconstruction: Clusters
*
- * This program will apply the Fixed Cone reconstruction algorithm at varying
- * opening angles of the cone (called "radius" simply because of the convention
- * used by the SLAC developers in org.lcsim) for the data set under which it is
- * run. The data will be stored in an AIDA tree under /fixedcone/r_(radius),
- * where (radius) varies from 0.03 to 0.09 step 0.03
*
- * @author Norman Graf with modifications by Eric Benavidez ([log in to unmask])
+ *
+ *
*/
public class EMClusterIDAnalyzer extends Driver {
// This is convenient for creating and filling histograms "on-the-fly".
private AIDA aida = AIDA.defaultInstance();
private IAnalysisFactory af;
- private IHistogramFactory hf;
+ private IHistogramFactory hf;
private ITree tree;
private DecimalFormat df;
private static final double SAMP_FRAC = 0.012;
-
+
// debugging flag:
// 0 no print out
private static final int debug = 0;
@@ -66,464 +48,47 @@
// it requires no inputs.
//
public EMClusterIDAnalyzer() {
- this.tree = aida.tree();
- this.df = new DecimalFormat();
+ this.tree = aida.tree();
+ this.df = new DecimalFormat();
this.angleVSk = aida.cloud2D("angle vs k");
- FixedConeClustererAnalyzer fixedConeClustererAnalyzer = new FixedConeClustererAnalyzer(this.tree);
+
+
+// add sub-drivers
+ FixedConeClustererAnalyzer fixedConeClustererAnalyzer = new FixedConeClustererAnalyzer(this.tree);
fixedConeClustererAnalyzer.setAttributes(this.SAMP_FRAC, this.debug, this.interactionRadius);
this.add(fixedConeClustererAnalyzer);
+
+ HMatrixAnalyzer hMatrixAnalyzer = new HMatrixAnalyzer("/home/carsten/LC/Output/CH_cdcaug05_gamma_Theta90_5GeV.hmx", tree);
+ //HMatrixAnalyzer hMatrixAnalyzer = new HMatrixAnalyzer(tree);
+ this.add(hMatrixAnalyzer);
+
+ MyHMatrixBuilder builder = new MyHMatrixBuilder();
+ this.add(builder);
}
-
- // Method calcVec: Calculate the four-vector for a given cluster with hits
- // that must be decoded by the given decoder.
- //
- // Inputs: A hit decoder and a cluster.
- // Output: The four-vector for the cluster.
- //
- public Momentum4Vector calcVec(CalorimeterIDDecoder decoder, BasicCluster clus) {
- Momentum4Vector sum = new Momentum4Vector();
- List<CalorimeterHit> hits = clus.getCalorimeterHits();
-
- for (CalorimeterHit hit : hits) {
- double E = hit.getRawEnergy();
- decoder.setID(hit.getCellID());
- double phi = decoder.getPhi();
- double theta = decoder.getTheta();
-
- double px = E*Math.cos(phi)*Math.sin(theta);
- double py = E*Math.sin(phi)*Math.sin(theta);
- double pz = E*Math.cos(theta);
-
- sum.plusEquals(new Momentum4Vector(px, py, pz, E));
- }
- return sum;
- }
-
-
- // Method calcLayerEnergies: Calculate the layer energies for the given
- // cluster with hits to be decoded by the given decoder.
- //
- // Inputs: A hit decoder and a cluster.
- // Output: An ArrayList with elements indexed by layer number that contain
- // the energy in the layer indexed, i.e. returned_list[0] is the
- // energy in layer 0.
- //
- public ArrayList<Double> calcLayerEnergies(CalorimeterIDDecoder decoder, BasicCluster clus) {
- ArrayList<Double> layerE = new ArrayList();
- List<CalorimeterHit> hits = clus.getCalorimeterHits();
-
- for (CalorimeterHit hit : hits) {
- double energy = hit.getRawEnergy();
- decoder.setID(hit.getCellID());
- int layer = decoder.getLayer();
-
- // Add the layer energy to the ArrayList. If it can't be added
- // within the current boundaries or be tacked onto the end of the
- // current list, tack elements onto the end until that is possible.
- //
- if (layerE.size() - 1 < layer) {
- for (int ctr = layerE.size(); ctr < layer; ctr++)
- layerE.add(0.0);
- layerE.add(energy);
- }
- else
- layerE.add(layer, energy);
- }
- return layerE;
- }
-
-
- // Method calcCentroidDirCosines: Calculates the centroid and direction
- // cosines for the given cluster with hits to be decoded by the given
- // decoder.
- //
- // Inputs: A hit decoder and a cluster.
- // Output: A 2-D double array. The first (leftmost) dimension is only of
- // length 2. Index 0 is for the centroid, and index 1 is for the
- // direction cosines. The second (rightmost) dimension keeps the
- // x, y, z coordinates in indices 0, 1, 2, respectively for
- // whatever quantity the first index is indexing. (i.e.
- // returned_array[0][2] is the z-coordinate of the cluster
- // centroid)
- //
- public double[][] calcCentroidDirCosines(CalorimeterIDDecoder decoder, BasicCluster clus) {
- double[][] ret = new double[2][3];
- List<CalorimeterHit> hits = clus.getCalorimeterHits();
- double[][] points = new double[3][hits.size()];
- int npoints = 0;
-
- for (CalorimeterHit hit : hits) {
- decoder.setID(hit.getCellID());
- points[0][npoints] = decoder.getX();
- points[1][npoints] = decoder.getY();
- points[2][npoints] = decoder.getZ();
- }
-
- PrincipalAxesLineFitter lf = new PrincipalAxesLineFitter();
- lf.fit(points);
- ret[0] = lf.centroid();
- ret[1] = lf.dircos();
-
- return ret;
- }
-
- // Method clusterEnergy: Calculate the energy as the sum of the energies
- // of the hits in clus. This is necessary at the moment since it seems
- // that clus.getEnergy() doesn't give the sum of the energies of the hits
- // in clus.
- //
- public double clusterEnergy(BasicCluster clus) {
- List<CalorimeterHit> hits = clus.getCalorimeterHits();
- double sum = 0.;
- for (CalorimeterHit hit : hits) {
- sum += hit.getRawEnergy();
- }
- return sum;
- }
-
- // Efficiency test: Check if the clustering algorithm is
- // "efficient" for this cluster that it found. Efficiency
- // is defined as
- // E_cluster > E_max - K*eRes(E)
- // where eRes(E) is 0.2*sqrt(E), the energy resolution of
- // the detector as a function of energy, and K in
- // {0.1,0.25,0.5,1.0}
- //
- // Fill a histogram with 1 if this cluster is efficient,
- // 0 if not.
- //
- public void testEfficiency(double eInit, double eMostEnergetic, double eMax, double angle) {
- double[] K = {0.10,0.25,0.5,1.0,2.0,3.0};
- StringBuffer buf = new StringBuffer();
-
- //for (double k : K) {
- for (double k = 0.1; k < 2.0; k += 0.1) {
- buf.delete(0, buf.length());
- buf.insert(0, "k_");
- df.setMaximumFractionDigits(2);
- df.setMinimumFractionDigits(2);
- df.format(k, buf, new FieldPosition(df.INTEGER_FIELD));
- String kStr = buf.toString();
-
- IHistogram1D effHist = aida.histogram1D("Efficiency " + kStr,2,-.5,1.5);
- ICloud1D diffTest = aida.cloud1D("Efficiency test difference " + kStr);
- ICloud2D effMin = aida.cloud2D("Efficiency test min vs eMostEnergetic " + kStr);
-
- double effMinVal = eMax - k*0.2*Math.sqrt(eInit)*SAMP_FRAC;
-
- diffTest.fill(eMostEnergetic-effMinVal);
- effMin.fill(eMostEnergetic, effMinVal);
-
- if (eMostEnergetic > effMinVal) {
- effHist.fill(1.0);
- this.angleVSk.fill(angle, k, 1.0);
-
-
- if (this.debug > 0)
- System.out.println("Filled efficiency with 1");
- }
- else {
- effHist.fill(0.0);
- if (this.debug > 0)
- System.out.println("Filled efficiency with 0");
- }
- }
- }
-
- // Method calcMass: With the given raw energy and raw momentum vector,
- // calculate the raw mass and its square.
- //
- // Inputs: The raw energy and the raw mass
- // Output: An array of 2 elements: 0 - mass squared, 1 - mass (>= 0)
- //
- public double[] calcMass(double eRaw, Momentum4Vector pRaw) {
- double[] mArray = new double[2];
- mArray[0] = eRaw*eRaw - pRaw.vec3dot(pRaw);
- if (mArray[0] < 0) {
- mArray[1] = 0.;
- if (mArray[0] < -1E-10) {
- System.out.println("Massively negative m2Raw = " + mArray[0]);
- System.out.println("eRaw = " + eRaw + ", p2Raw = " + pRaw.vec3dot(pRaw));
- }
- }
- else
- mArray[1] = Math.sqrt(mArray[0]);
-
- return mArray;
- }
-
- // Method mkdirIgnoreExist: Makes a directory "dirname" in the
- // ITree "tree" while discarding any exception that arises because
- // the directory already exists. Also changes to directory
- // "dirname" if it is found. If not, a runtime error results.
- //
- public void mkdirIgnoreExist(ITree tree, String dirname) {
- try {
- tree.mkdir(dirname);
- }
- catch (IllegalArgumentException e) {
- if (! (e instanceof TreeObjectAlreadyExistException))
- throw e;
- }
- tree.cd(dirname);
+
+
+
+
+
+
+
+ protected void startOfData(){
+ super.startOfData();
}
-
- // Method process: Processes the events. This is where the events are fed
- // when JAS3 feeds events to the program.
- //
public void process(EventHeader event) {
super.process(event);
- // Get MC Particle information - namely the energy of the final state
- // particle. It looks as if for theta=90 we can get the initial state
- // energy fairly accurately from the final state energy.
-
-
this.eventCounter++;
- if (this.eventCounter % 50 == 0)
- System.out.println(this.eventCounter + " events read");
-
- List<MCParticle> mcParticles = event.getMCParticles();
- double eInit = 0.;
- boolean neutron = false;
-
- for (MCParticle mcParticle : mcParticles) {
-
- if (mcParticle.getGeneratorStatus() == MCParticle.FINAL_STATE) {
- aida.cloud1D("Radius of interaction").fill(mcParticle.getEndPoint().magnitude());
-
- /*
- * Rejects photons with radius < 1260. Here, radius is not an
- * angle but is an actual radius.
- *
- * 'pre-selection'
- */
-
- if (mcParticle.getType().getName().equals("gamma")
- && mcParticle.getEndPoint().magnitude() < this.interactionRadius) {
- aida.cloud1D("Rejected Particles").fill(0);
-
- if (this.debug > 0)
- System.out.println("Rejected event " + event.getEventNumber());
-
- return;
- }
- // count events which passed 'pre-selection'
- this.passedEvents++;
-
-
- eInit = mcParticle.getEnergy();
- aida.cloud1D("eInit").fill(eInit);
- if (mcParticle.getType().getName().equals("n"))
- neutron = true;
- }
- }
-
-
- List<CalorimeterHit> ecalBarrHits = event.get(CalorimeterHit.class,"EcalBarrHits");
- List<CalorimeterHit> endcapHits = event.get(CalorimeterHit.class,"EcalEndcapHits");
- CalorimeterIDDecoder _decoder = (CalorimeterIDDecoder) event.getMetaData(ecalBarrHits).getIDDecoder();
-
- // create a map of cells keyed on their index
- Map<Long, CalorimeterHit> hitmap = new HashMap<Long, CalorimeterHit>();
-
- double sumBarrHits = 0.;
- double sumEndcapHits = 0.; // NOTE: Only nonzero for neutrons.
-
- for (CalorimeterHit hit : ecalBarrHits) {
- long id = hit.getCellID();
- double cell_energy = hit.getRawEnergy();
- sumBarrHits += cell_energy;
- aida.cloud1D("cell energy (cloud1D)").fill(cell_energy);
- aida.histogram1D("cell energy (histogram1D)",100,0.0,0.001).fill(cell_energy);
- hitmap.put(id, hit);
- _decoder.setID(id);
+ if ((this.eventCounter % 50) == 0) {
+ System.out.println("processing event " + this.eventCounter);
}
-
- // Note: eMax is either
- // a) 5 GeV if the particle is a neutron (useful only for 20 GeV n),
- // or
- // b) The sum of the deposited energy in the ECAL if the particle is
- // not a neutron.
- //
- // This is for the efficiency test later on.
- //
- double eMax;
- if (neutron == true) {
- eMax = 5*SAMP_FRAC;
- aida.cloud1D("neutron").fill(1);
- }
- else {
- for (CalorimeterHit hit : endcapHits) {
- sumEndcapHits += hit.getRawEnergy();
- }
- eMax = sumBarrHits + sumEndcapHits;
- aida.cloud1D("neutron").fill(0);
- }
- aida.cloud1D("eMax").fill(eMax);
-
- if (this.debug > 0)
- System.out.println("eInit = " + eInit + ", eMax = " + eMax);
-
-
-
-
- // Organize an AIDA tree with the fixed cone clustering algorithm
- // as a top-level directory
-
- mkdirIgnoreExist(tree, "/FixedCone");
-
- // Iterate over several values of the radius (i.e. cone opening angle)
- // NB: "radius" really is cone opening angle but as mentioned before,
- // this term is used because of the SLAC convention
-
- StringBuffer buf = new StringBuffer();
-
- for (double radius = 0.01; radius <= 0.09; radius += 0.01) {
- // Create new AIDA directory for the radius.
- // Ensure that radius directory has two figures after decimal pt.
-
- buf.delete(0, buf.length());
- buf.insert(0, "/FixedCone/r_");
- df.setMaximumFractionDigits(2);
- df.setMinimumFractionDigits(2);
- df.format(radius, buf, new FieldPosition(df.INTEGER_FIELD));
- String radstr = buf.toString();
- mkdirIgnoreExist(tree, radstr);
-
- // Zero threshold on the seed energy or the cluster energy,
- // respectively.
- //
- double seed = 0.;
- double minE = 0.;
-
- FixedConeClusterer fcc = new FixedConeClusterer(radius, seed, minE);
- List<BasicCluster> clusters = fcc.makeClusters(ecalBarrHits, _decoder);
-
- // Get all the clusters and cluster sets to prepare to loop over
- // them.
-
- List<List<Cluster>> clusterSets = event.get(Cluster.class);
- if (this.histoLevel > 0) {
- aida.cloud1D("clusterSets").fill(clusterSets.size());
- aida.cloud1D("clusters").fill(clusters.size());
- }
-
- // Accumulators for a raw (uncorrected for sampling fraction)
- // momentum 4-vector sum, a raw energy sum, and a corrected (for
- // sampling fraction) energy sum
-
- Momentum4Vector sumVecRaw = new Momentum4Vector();
- double sumERaw = 0.;
- double sumECorr = 0.;
-
- int ctr = 0;
-
- double eMostEnergetic = 0.; // Energy for most energetic cluster.
- // Used for efficiency calculation.
-
- // Loop through all clusters
- for (BasicCluster cluster : clusters) {
- // There appears to be a bug in that the cluster energy is less
- // than the sum of the energy of each hit in the cluster. So
- // calculate the cluster energy as the sum of the energy of
- // each hit in the cluster.
-
- double eRaw = clusterEnergy(cluster);
- double eCorr = eRaw / SAMP_FRAC; // corrected energy
- sumERaw += eRaw;
- sumECorr += eCorr;
-
- // This, along with eMostEnergetic = 0. (since we expect
- // nothing less than 0) will give us the energy of the most
- // energetic cluster when this loop is done
- //
- if (eRaw > eMostEnergetic)
- eMostEnergetic = eRaw;
-
- aida.cloud1D("eRaw over eMax").fill(eRaw/eMax);
-
- int ncells = cluster.getCalorimeterHits().size();
-
- // Calculate, then accumulate, raw vector.
- Momentum4Vector vecRaw = calcVec(_decoder, cluster);
- sumVecRaw.plusEquals(vecRaw);
-
- // Calculate the momentum and then the mass for this cluster
- double[] mass = new double[2];
- mass = calcMass(eRaw, vecRaw);
-
- aida.cloud1D("Cluster raw mass").fill(mass[1]);
- aida.cloud1D("Cluster raw m-squared").fill(mass[0]);
-
- double mCorr = mass[1] / SAMP_FRAC;
- aida.cloud1D("Cluster corrected mass").fill(mCorr);
-
- aida.cloud1D("Number of cells in cluster").fill(ncells);
- aida.histogram1D("Number of cells in cluster (histogram1D)",50,-0.5,49.5).fill(ncells);
- aida.cloud2D("Raw Energy vs ncells").fill(ncells,eRaw);
- aida.cloud1D("Cluster raw energy").fill(eRaw);
- aida.cloud1D("Cluster corrected energy").fill(eCorr);
-
- if(ncells>10) {
- ctr++;
- aida.cloud1D("eRaw over eMax ncells>10").fill(eRaw/eMax);
- aida.cloud1D("Cluster raw energy when ncell > 10 ").fill(eRaw);
-
- ArrayList<Double> layerE = calcLayerEnergies(_decoder, cluster);
- int j = 0;
- for (double lEnergy : layerE) {
- double eFrac = lEnergy / eRaw;
- aida.cloud2D("Raw Fractional Energy vs Layer").fill(j++,eFrac);
- }
-
- if (this.histoLevel > 0) {
- double[][] centroidDirCosines = calcCentroidDirCosines(_decoder, cluster);
- double[] pos = centroidDirCosines[0];
- aida.cloud2D("centroid x vs y").fill(pos[0], pos[1]);
- aida.cloud1D("centroid radius").fill( Math.sqrt(pos[0]*pos[0] + pos[1]*pos[1]) );
-
- double[] dircos = centroidDirCosines[1];
- for(int i=0; i< dircos.length; ++i) {
- aida.cloud1D("dircos "+i).fill(dircos[i]);
- }
- }
- }
- }
-
- // Test the efficiency of the clusterer.
- this.testEfficiency(eInit, eMostEnergetic, eMax, radius);
-
- aida.cloud1D("Number of clusters with ncell>10").fill(ctr);
- if ((debug > 0) && (ctr >= 2)) {
- System.out.println("Ctr = " + ctr + ", Event = " + event.getEventNumber());
- }
-
- // Find reconstructed mass, both raw and corrected (in terms of
- // samp. frac.) and put them into a cloud.
- //
- double[] sumMass = calcMass(sumERaw, sumVecRaw);
- double sumMCorr = sumMass[1] / SAMP_FRAC;
-
- aida.cloud1D("Raw sum of energy").fill(sumERaw);
- aida.cloud1D("Raw sum of mass").fill(sumMass[1]);
- aida.cloud1D("Raw sum of m-squared").fill(sumMass[0]);
-
- aida.cloud1D("Corrected sum of energy").fill(sumECorr);
- aida.cloud1D("Corrected sum of mass").fill(sumMCorr);
- aida.histogram1D("Corrected sum of mass HISTOGRAM", 50, 0, 0.4).fill(sumMCorr);
-
- tree.cd("/");
- }
}
-
+
+
protected void endOfData() {
super.endOfData();
System.out.println("Events analyzed " + this.eventCounter);
System.out.println("Events passed pre-selection " + this.passedEvents);
- //this.angleVSk.convert(10, 0.0, 1.0, 20, 0.0, 2.0);
- //this.angleVSkHist = this.angleVSk.histogram();
- //this.angleVSkHist.scale(1./this.passedEvents);
}
}
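
Note on the revision 1.3 structure above: EMClusterIDAnalyzer is now a thin parent driver that only registers sub-drivers (FixedConeClustererAnalyzer, HMatrixAnalyzer, MyHMatrixBuilder) in its constructor and lets super.process(event) hand each event on to them. Below is a minimal sketch of that composition pattern, using only calls that appear in the diff (Driver.add, super.process, AIDA.defaultInstance, aida.tree, the FixedConeClustererAnalyzer(ITree) constructor); the class name ParentAnalyzer and the exact wiring are illustrative assumptions, not code from this commit.

    import org.lcsim.event.EventHeader;
    import org.lcsim.util.Driver;
    import org.lcsim.util.aida.AIDA;

    // Hypothetical parent driver, sketched after the revision 1.3 constructor above.
    public class ParentAnalyzer extends Driver {
        private final AIDA aida = AIDA.defaultInstance();

        public ParentAnalyzer() {
            // Each analysis step is its own sub-driver; the parent only registers them.
            this.add(new FixedConeClustererAnalyzer(aida.tree()));
        }

        public void process(EventHeader event) {
            // The base Driver forwards the event to every registered sub-driver.
            super.process(event);
        }
    }
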
lcsim/src/org/lcsim/contrib/CarstenHensel
diff -u -r1.2 -r1.3
--- FixedConeClustererAnalyzer.java 22 Aug 2005 03:58:10 -0000 1.2
+++ FixedConeClustererAnalyzer.java 24 Aug 2005 15:11:51 -0000 1.3
@@ -3,20 +3,23 @@
*
* Created on August 21, 2005, 8:08 PM
*
- *
+ *
*/
package org.lcsim.contrib.CarstenHensel;
-
-import hep.aida.ICloud1D;
-import hep.aida.ICloud2D;
-import hep.aida.IHistogram1D;
import hep.aida.ITree;
+import hep.aida.ref.tree.TreeObjectAlreadyExistException;
import java.text.DecimalFormat;
import java.text.FieldPosition;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import org.lcsim.event.CalorimeterHit;
import org.lcsim.event.EventHeader;
import org.lcsim.event.MCParticle;
+import org.lcsim.geometry.subdetector.CalorimeterIDDecoder;
+import org.lcsim.recon.cluster.fixedcone.FixedConeClusterer;
+import org.lcsim.recon.cluster.util.BasicCluster;
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
@@ -27,28 +30,27 @@
public class FixedConeClustererAnalyzer extends Driver{
private AIDA aida = AIDA.defaultInstance();
private ITree tree;
- private String treePath = "";
+ private static final String treePath = "FixedConeClustererAnalyzer";
private DecimalFormat df;
private double SAMP_FRAC;
private double radiusCut;
private int debug;
private int eventCounter = 0;
+ private boolean initialized = false;
+ private FixedConeClusterer fcClusterer;
+ private CalorimeterIDDecoder decoder;
/** Creates a new instance of FixedConeClustererAnalyzer */
public FixedConeClustererAnalyzer() {
- this.df = new DecimalFormat();
- if (this.treePath == "")
- this.treePath = this.toString();
}
public FixedConeClustererAnalyzer(ITree tree) {
this();
this.tree = tree;
- this.tree.mkdir(this.treePath);
}
public void setAttributes(double samplingFraction, int debug, double radiusCut){
- this.SAMP_FRAC = samplingFraction;
+ this.SAMP_FRAC = samplingFraction;
this.debug = debug;
this.radiusCut = radiusCut;
}
@@ -57,75 +59,103 @@
boolean passed = false;
for (MCParticle mcParticle : mcParticles) {
if (mcParticle.getGeneratorStatus() == MCParticle.FINAL_STATE) {
-
- /*
+
+ /*
* Rejects photons with radius < 1260. Here, radius is not an
* angle but is an actual radius.
- *
+ *
* 'pre-selection'
*/
- if (mcParticle.getType().getName().equals("gamma")
- && mcParticle.getEndPoint().magnitude() < this.radiusCut) {
- passed = false;
- } else {
- passed = true;
+ if (mcParticle.getType().getName().equals("gamma")
+ && mcParticle.getEndPoint().magnitude() < this.radiusCut) {
+ passed = false;
+ } else {
+ passed = true;
}
}
}
return passed;
- }
+ }
+
+ public void initialize() {
+ this.tree.cd("/");
+ this.mkdirIgnoreExist(this.tree, this.treePath);
+ df = new DecimalFormat();
+ df.setMaximumFractionDigits(2);
+ df.setMinimumFractionDigits(2);
+ this.initialized = true;
+ }
public void process(EventHeader event) {
+
+ if (!this.initialized)
+ this.initialize();
+
List<MCParticle> mcParticles = event.getMCParticles();
- if (!this.eventPassed(mcParticles)) {
+ if (!this.eventPassed(mcParticles))
return;
- } else {
- this.eventCounter++;
- aida.cloud1D("FCCA").fill(1.0);
- }
+
+ this.tree.cd("/" + this.treePath);
+
+ List<CalorimeterHit> ecalBarrHits = event.get(CalorimeterHit.class,"EcalBarrHits");
+ List<CalorimeterHit> endcapHits = event.get(CalorimeterHit.class,"EcalEndcapHits");
+
+ double eMax = 0.0;
+ for (CalorimeterHit hit : endcapHits)
+ eMax += hit.getRawEnergy();
+
+ for (CalorimeterHit hit : ecalBarrHits)
+ eMax += hit.getRawEnergy();
+
+
+
+ this.decoder = (CalorimeterIDDecoder)event.getMetaData(ecalBarrHits).getIDDecoder();
+
+ // create a map of cells keyed on their index
+ Map<Long, CalorimeterHit> hitMap = new HashMap<Long, CalorimeterHit>();
+
+ double initSeed = 0.0;
+ double initEmin = 0.005;
+ for (double initRadius = 0.01; initRadius < 0.1; initRadius += 0.01) {
+ // set the tree path
+ StringBuffer buf = new StringBuffer();
+ buf.delete(0, buf.length());
+ buf.insert(0, "r_");
+ df.format(initRadius, buf, new FieldPosition(df.INTEGER_FIELD));
+ this.mkdirIgnoreExist(this.tree, buf.toString());
+
+ this.fcClusterer = new FixedConeClusterer(initRadius, initSeed, initEmin);
+
+ List<BasicCluster> clusters = fcClusterer.makeClusters(ecalBarrHits, decoder);
+ for (BasicCluster cluster : clusters) {
+ if(cluster.getCalorimeterHits().size() > 5) { // number of cells
+ aida.cloud1D("cluster energy over total energy").fill(cluster.getRawEnergy() / eMax);
+ }
+ }
+ this.tree.cd("/" + this.treePath);
+ }
}
- public void testEfficiency(double eInit, double eMostEnergetic, double eMax, double angle) {
- double[] K = {0.10,0.25,0.5,1.0,2.0,3.0};
- StringBuffer buf = new StringBuffer();
-
- //for (double k : K) {
- for (double k = 0.1; k < 2.0; k += 0.1) {
- buf.delete(0, buf.length());
- buf.insert(0, "k_");
- df.setMaximumFractionDigits(2);
- df.setMinimumFractionDigits(2);
- df.format(k, buf, new FieldPosition(df.INTEGER_FIELD));
- String kStr = buf.toString();
-
- IHistogram1D effHist = aida.histogram1D("Efficiency " + kStr,2,-.5,1.5);
- ICloud1D diffTest = aida.cloud1D("Efficiency test difference " + kStr);
- ICloud2D effMin = aida.cloud2D("Efficiency test min vs eMostEnergetic " + kStr);
-
- double effMinVal = eMax - k*0.2*Math.sqrt(eInit)*SAMP_FRAC;
-
- diffTest.fill(eMostEnergetic-effMinVal);
- effMin.fill(eMostEnergetic, effMinVal);
-
- if (eMostEnergetic > effMinVal) {
- effHist.fill(1.0);
-
-
- if (this.debug > 0)
- System.out.println("Filled efficiency with 1");
- }
- else {
- effHist.fill(0.0);
- if (this.debug > 0)
- System.out.println("Filled efficiency with 0");
- }
- }
- }
-
- protected void endOfData() {
+
+
+ protected void endOfData() {
System.out.println(this.eventCounter + " events analyzed by FCCA");
- }
+ }
+ // Method mkdirIgnoreExist: Makes a directory "dirname" in the
+ // ITree "tree" while discarding any exception that arises because
+ // the directory already exists. Also changes to directory
+ // "dirname" if it is found. If not, a runtime error results.
+ //
+ public void mkdirIgnoreExist(ITree tree, String dirname) {
+ try {
+ tree.mkdir(dirname);
+ } catch (IllegalArgumentException e) {
+ if (! (e instanceof TreeObjectAlreadyExistException))
+ throw e;
+ }
+ tree.cd(dirname);
+ }
}