lcsim/src/org/lcsim/contrib/SteveMagill
diff -N HMPhoDriver.java
--- /dev/null 1 Jan 1970 00:00:00 -0000
+++ HMPhoDriver.java 29 May 2008 21:30:41 -0000 1.1
@@ -0,0 +1,373 @@
+package org.lcsim.contrib.compile.SteveMagill;
+
+// Photon Finder
+// Input : EM Clusters (DT, Cone)
+// 1) test clusters for proximity to track - if close enough, eliminate from photon list
+// default is 0.02 in track-cluster distance (theta-phi)
+// 2) H-Matrix test on most remaining clusters
+// Requirements : a) minimum of 25 hit pixels in cluster
+// b) minimum of 500 MeV energy (calibrated)
+// c) 1st layer hit < 8 (~5 X0)
+// Re-cluster hits with NN 1111 to define cluster core
+// if 1 NN cluster is defined and min number cells is 25, then apply H-Matrix test
+// else use original cluster and apply H-Matrix test
+// Minimum Log Diagonal Chisquared Probability is -5.9 (the logchiprobD cut applied in process)
+// Passing clusters are photons
+// 3) for remaining clusters :
+// if 1st layer < 8, photon if min energy > 100 MeV (calibrated)
+// else photon if :
+// no hadron cluster within 0.02 (same as above track-cluster distance)
+// and min energy > 100 MeV (calibrated)
+// output : list of photon clusters arranged by subdetector, list of photon reconstructed particles
+
+import java.util.*;
+import org.lcsim.event.*;
+import org.lcsim.util.Driver;
+import org.lcsim.util.swim.*;
+import org.lcsim.util.lcio.LCIOConstants;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.util.aida.AIDA;
+import hep.aida.ITree;
+import org.lcsim.geometry.*;
+import hep.physics.vec.Hep3Vector;
+import hep.physics.vec.BasicHep3Vector;
+import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
+import org.lcsim.recon.ztracking.cheater.*;
+import org.lcsim.recon.cluster.fixedcone.*;
+import org.lcsim.recon.cluster.analysis.*;
+import org.lcsim.recon.emid.hmatrix.HMatrix;
+import org.lcsim.recon.emid.hmatrix.HMatrixTask;
+import org.lcsim.recon.emid.hmatrix.HMatrixBuilder;
+import org.lcsim.recon.emid.hmatrix.HMatrixConditionsConverter;
+import org.lcsim.math.chisq.ChisqProb;
+import org.lcsim.recon.cluster.nn.NearestNeighborCluster;
+import org.lcsim.spacegeom.*;
+import org.lcsim.util.hitmap.HitMap;
+import org.lcsim.detector.DetectorIdentifierHelper;
+import org.lcsim.detector.identifier.*;
+
+public class HMPhoDriver extends Driver
+{
+    private AIDA aida = AIDA.defaultInstance();
+    // minimum number of hit cells a cluster needs before the H-Matrix test is applied
+    private int _mincells;
+    // track-cluster veto distance in (theta, phi) space; clusters closer than this to a track are dropped
+    private double _dTrcl;
+    // minimum (calibrated) cluster energy required for the H-Matrix test
+    private double _minE;
+    private HMatrixTask _task = HMatrixTask.ANALYZE; // only ANALYZE is exercised in this driver
+    private int _nLayers; // EM barrel layer count, read from the detector geometry on the first event
+    private HMatrixBuilder _hmb;
+    private HMatrix _hmx; // cached longitudinal H-Matrix, fetched from conditions in ANALYZE mode
+    private ITree _tree;
+    private boolean _initialized = false; // one-time geometry/conditions setup done?
+    private int _nmeasB; // measurement count: _nLayers layer energy fractions + log10(E)
+    private int _nmeasEC;
+    private static final double _log10inv = 1./Math.log(10.0);
+    private int _logEIndex; // index of log10(E) in _vals (== _nLayers)
+    double[] _vals; // reusable measurement vector handed to the H-Matrix
+//    private double[] BRadii = new double[100];
+//    private double[] ECZs = new double[100];
+    private boolean phoD = false; // fill diagnostic histograms/clouds when true
+    private boolean phoRes = false;
+    private String _clusname; // input EM cluster collection name (see setInputClusterName)
+    private String _oclname; // output photon cluster collection name (see setOutputClusterName)
+
+    /**
+     * Creates the photon finder and registers the H-Matrix conditions converter.
+     *
+     * @param mincells minimum number of hit cells for a cluster to be H-Matrix tested
+     * @param dTrcl    track-cluster veto distance in (theta, phi) space
+     * @param minE     minimum calibrated cluster energy for the H-Matrix test
+     */
+    public HMPhoDriver(int mincells, double dTrcl, double minE)
+    {
+        _mincells = mincells;
+        _dTrcl = dTrcl;
+        _minE = minE;
+        // The HMatrix could possibly change, so be sensitive to this.
+        getConditionsManager().registerConditionsConverter(new HMatrixConditionsConverter());
+    }
+
+// public HMPhoDriver()
+// {
+// this(HMatrixTask.ANALYZE);
+// }
+
+// public HMPhoDriver(HMatrixTask task)
+// {
+// _tree = aida.tree();
+// _task = task;
+// if(_task==HMatrixTask.ANALYZE)
+// {
+ // The HMatrix could possibly change, so be sensitive to this.
+// getConditionsManager().registerConditionsConverter(new HMatrixConditionsConverter());
+// }
+// }
+
+    /**
+     * Per-event entry point: selects photon candidates from the input EM cluster
+     * list, vetoes clusters near extrapolated tracks, re-clusters surviving
+     * candidates with a nearest-neighbor pass, applies the longitudinal H-Matrix
+     * test, and publishes the resulting cluster lists back into the event.
+     */
+    protected void process(EventHeader event)
+    {
+        super.process(event); // executes all added drivers
+
+        // One-time setup: read the EM barrel layer count from the geometry and,
+        // in ANALYZE mode, fetch the cached longitudinal H-Matrix.
+        if(!_initialized)
+        {
+            CylindricalCalorimeter calsubB = (CylindricalCalorimeter)event.getDetector().getSubdetectors().get("EMBarrel");
+            _nLayers = calsubB.getLayering().getLayerCount();
+            System.out.println("found "+_nLayers+" layers in the EMBarrel");
+//            _nLayers = 29;
+            // Measurement vector is one energy fraction per layer plus log10(E),
+            // so it has _nLayers+1 entries with log10(E) at index _nLayers.
+            _nmeasB = _nLayers;
+            _nmeasB+=1;
+            _logEIndex = _nLayers;
+            _vals = new double[_nmeasB];
+//            for (int i=0; i<_nLayersB; ++i)
+//            {
+//                BRadii[i]=calsubB.getLayering().getDistanceToLayerSensorMid(i);
+//            }
+
+//            CylindricalCalorimeter calsubEC = (CylindricalCalorimeter)event.getDetector().getSubdetectors().get("EMEndcap");
+//            _nLayersEC = calsubEC.getLayering().getLayerCount();
+//            System.out.println("found "+_nLayersEC+" layers in the EMEndcap");
+//            _nmeasEC = _nLayersEC;
+//            _logEIndexEC = _nmeasEC;
+//            _nmeasEC+=1;
+//            _valsEC = new double[_nmeasEC];
+//            for (int i=0; i<_nLayersEC; ++i)
+//            {
+//                ECZs[i]=calsubEC.getLayering().getDistanceToLayerSensorMid(i);
+//            }
+
+            //FIXME key needs to be better defined
+            int key = 0;
+            if(_task==HMatrixTask.ANALYZE)
+            {
+                //FIXME need to fetch name of HMAtrix file to use from a conditions file
+//                System.out.println("Looking for HMatrix...");
+                _hmx = getConditionsManager().getCachedConditions(HMatrix.class,
+                    "LongitudinalHMatrix.hmx").getCachedData();
+            }
+            _initialized = true;
+        }
+
+        // setup a map linking found photon clusters to their positions
+        Map<BasicCluster, SpacePoint> phoposmap = new HashMap<BasicCluster, SpacePoint>();
+        // loop over all EM clusters, check starting layer, check to see if no tracks nearby,
+        // make a new NN1118 cluster out of the hits,
+        // evaluate 1118 cluster with the HMatrix
+        List<BasicCluster> CandClusters = new ArrayList<BasicCluster>();
+        List<BasicCluster> TotNNclusters = new ArrayList<BasicCluster>();
+        List<BasicCluster> HMPhoclusters = new ArrayList<BasicCluster>(); // found H-M in B
+//        Map<Track, SpacePoint> trposmap = (Map<Track, SpacePoint>) event.get("TrackPosMap");
+        // use track extrapolated position at shower max map for this routine (don't need mips)
+        // NOTE(review): unchecked cast — assumes an upstream driver filled
+        // "TrackXEShMaxMap" with exactly this map type; verify against producer.
+        Map<Track, SpacePoint> tresmmap = (Map<Track, SpacePoint>) event.get("TrackXEShMaxMap");
+        try
+        {
+            List<BasicCluster> DTclusters = event.get(BasicCluster.class,_clusname);
+
+            for (BasicCluster dtclus : DTclusters)
+            {
+//                boolean phoclb = false;
+                List<BasicCluster> ennclusters = new ArrayList<BasicCluster>();
+                BasicCluster candclus = new BasicCluster();
+                // get cluster info
+                double ecp[] = dtclus.getPosition();
+                double ecpx = ecp[0];
+                double ecpy = ecp[1];
+                double ecpz = ecp[2];
+                double ecR = Math.sqrt(ecpx*ecpx+ecpy*ecpy);
+                // polar angle from atan(r/z), folded into [0, pi)
+                double ebclth = Math.atan(ecR/ecpz);
+                if (ebclth<0) ebclth+=Math.PI;
+                // azimuth folded into [0, 2*pi)
+                double ebclph = Math.atan2(ecpy,ecpx);
+                if (ebclph<0) ebclph+=2*Math.PI;
+                if (phoD) aida.cloud1D("Theta of HM Pho Cand Clusters").fill(ebclth);
+                if (phoD) aida.cloud1D("Phi of HM Pho Cand Clusters").fill(ebclph);
+                if (phoD) aida.cloud2D("Theta vs Phi of HM Pho Cand Clusters").fill(ebclph,ebclth);
+
+                // get first layer info for this cluster
+                double[] layE = layerEnergies(dtclus);
+                int layint = 0;
+                // first layer whose energy fraction exceeds 2%
+                for(int j=0; j<layE.length; ++j)
+                {
+                    layint = j;
+                    if (layE[j] > 0.02) break;
+                }
+                if (phoD) aida.cloud1D("Layer of first photon cand interaction").fill(layint);
+
+                // Now, first check interaction layer - must be < 8 or probably not photon - get out
+                if (layint>7) continue;
+
+                // add check here for closeness to track
+                int tcmtch = 0;
+                for (Track itr : tresmmap.keySet())
+                {
+                    SpacePoint trsp = tresmmap.get(itr);
+                    double trsmth = Math.atan(trsp.rxy()/trsp.z());
+                    if (trsmth<0) trsmth+=Math.PI;
+                    double trsmph = Math.atan2(trsp.y(),trsp.x());
+                    if (trsmph<0) trsmph+=2*Math.PI;
+                    double distth = Math.abs(ebclth-trsmth);
+                    double distph = Math.abs(ebclph-trsmph);
+                    // wrap the phi difference to at most pi
+                    if (distph>Math.PI) distph = 2*Math.PI-distph;
+                    double distclmip = Math.sqrt(distth*distth+distph*distph);
+                    // NOTE(review): 0.2 here is only a wide plotting window; the
+                    // actual veto uses _dTrcl (default 0.02) below — confirm intended.
+                    if (distclmip<0.2)
+                    {
+                        if (phoD) aida.cloud1D("PhoCand Clus Mip Clus Dist").fill(distclmip);
+                        if (phoD) aida.cloud2D("Theta vs Phi of TrXShMax in HMPho").fill(trsmph,trsmth);
+                    }
+                    if (distclmip<_dTrcl) tcmtch++;
+                }
+                // if too close to track, don't check
+                if (tcmtch>0) continue;
+
+                // now check for photons with H-matrix using NN to recluster photon candidate cluster
+                if (dtclus.getEnergy()>_minE && dtclus.getCalorimeterHits().size()>_mincells)
+                {
+                    // Make a hitmap of cluster hits to re-cluster with NN clusterer
+                    HitMap ebhitmap = new HitMap();
+                    for (CalorimeterHit ebhit : dtclus.getCalorimeterHits())
+                    {
+                        long cID = ebhit.getCellID();
+                        ebhitmap.put(cID, ebhit);
+                    }
+                    // nearest-neighbor window: +-1 cell in u, v and layer, no energy threshold
+                    int _dU = 1;
+                    int _dV = 1;
+                    int _dLayer = 1;
+                    double ethresh = 0.;
+
+                    // NearestNeighborCluster consumes hits from ebhitmap as it grows,
+                    // so this loop terminates once the map is drained — TODO confirm
+                    // that behavior against the NearestNeighborCluster implementation.
+                    for (;;)
+                    {
+                        if (ebhitmap.isEmpty()) break;
+                        long hID = ebhitmap.keySet().iterator().next();
+                        CalorimeterHit hit = ebhitmap.get(hID);
+                        NearestNeighborCluster nnclus = new NearestNeighborCluster(ebhitmap, hit, hID, _dU, _dV, _dLayer, ethresh);
+                        if(nnclus.getCalorimeterHits().size()>_mincells)
+                        {
+                            ennclusters.add(nnclus);
+//                            System.out.println("Formed NN cluster from FC cluster");
+                            TotNNclusters.add(nnclus);
+                        }
+                    }
+
+                    // if number of NN clusters is = 1, use it to test with H-Matrix, else use original FC cluster
+                    if (ennclusters.size()==1 && ennclusters.get(0).getCalorimeterHits().size()>_mincells)
+                    {
+                        candclus = ennclusters.get(0);
+                    } else
+                    {
+                        candclus = dtclus;
+                    }
+                    CandClusters.add(candclus);
+
+                    // check if this cluster passes H-Matrix test, if so, add candclus to list of Photon Clusters
+                    double energy = candclus.getRawEnergy();
+                    double clEn = candclus.getEnergy();
+                    double[] layerE = layerEnergies(candclus);
+                    // accumulate the longitudinal energy fractions...
+                    // tried to kludge to correct for photons interacting late - need to redo layer info
+                    int layfirst = 0;
+                    for(int j=0; j<layerE.length; ++j)
+                    {
+                        layfirst = j;
+                        if (layerE[j] > 0.02) break;
+                    }
+//                    System.out.println("Length is " + layerE.length + "first layer is " + layfirst);
+                    if (phoD) aida.cloud1D("Layer of first HM cand interaction").fill(layfirst);
+
+                    // shift the longitudinal profile so it starts at the first
+                    // interacting layer, zero-padding the tail
+                    for(int i=0; i<layerE.length; ++i)
+                    {
+                        if (i<layerE.length-layfirst)
+                        {
+                            _vals[i] = layerE[i+layfirst];
+                        } else
+                        {
+                            _vals[i] = 0.;
+                        }
+                        if (phoD) aida.cloud2D("Fractional Energy vs Layer").fill(i,_vals[i]);
+                    }
+                    // Add the correlation to the log of the cluster energy
+                    _vals[_logEIndex]=Math.log10(energy);
+                    // have now filled the vector of measurements. need to either accumulate the HMatrix or apply it
+                    if (_task==HMatrixTask.ANALYZE)
+                    {
+//                        if (phoD) aida.cloud1D("nmeas").fill(_nmeasB);
+                        double chisq = _hmx.chisquared(_vals);
+                        if (phoD) aida.cloud1D("Chisq").fill(chisq);
+                        if (phoD) aida.cloud2D("Chisq vs energy").fill(energy,chisq);
+                        if (phoD) aida.cloud1D("Chisq Probability").fill(ChisqProb.gammq(_nmeasB,chisq));
+                        if (phoD) aida.cloud1D("Log Chisq Prob").fill(Math.log10(ChisqProb.gammq(_nmeasB,chisq)));
+                        double chisqD = _hmx.chisquaredDiagonal(_vals);
+                        if (phoD) aida.cloud1D("ChisqD").fill(chisqD);
+                        if (phoD) aida.cloud2D("ChisqD vs energy").fill(energy,chisqD);
+                        double chiprobD = ChisqProb.gammq(_nmeasB,chisqD);
+                        // floor the probability so the log10 below stays finite
+                        if(chiprobD<0.0000000000001) chiprobD=0.0000000000001;
+                        if (phoD) aida.cloud1D("ChisqD Probability").fill(chiprobD);
+                        double logchiprobD = Math.log10(chiprobD);
+                        if (phoD) aida.cloud1D("Log ChisqD Probability").fill(logchiprobD);
+                        if (phoD) aida.cloud2D("Chisq vs LogChisqD Prob").fill(logchiprobD,chisq);
+                        if (phoD) aida.cloud2D("Clus E vs Log ChisqD Prob").fill(logchiprobD,clEn);
+                        double chisqND = chisq - chisqD;
+                        if (phoD) aida.cloud1D("ChisqND").fill(chisqND);
+                        // NOTE(review): the ORIGINAL cluster (dtclus), not the
+                        // re-clustered candclus that was actually tested, is kept
+                        // here — confirm this is intended.
+                        if (logchiprobD > -5.9)
+                        {
+                            HMPhoclusters.add(dtclus);
+                        }
+                    } // end of H-Matrix test loop
+                } // end of minE, mincell requirement loop
+            } // end of cluster loop
+        }
+        catch (java.lang.IllegalArgumentException ex)
+        {
+            // event.get throws when the input collection is absent; treat as "no clusters"
+            System.out.println("No clusters in photon finder");
+        }
+
+        event.put("AllEMNNClusters",TotNNclusters);
+        event.put("PhoCandClusters",CandClusters);
+        event.put(_oclname,HMPhoclusters);
+
+        // Diagnostic summaries of the found photon clusters
+        if (phoD)
+        {
+            aida.cloud1D("Number of Photon Clusters").fill(HMPhoclusters.size());
+            double PFAPhoE = 0;
+            for (BasicCluster pho : HMPhoclusters)
+            {
+                PFAPhoE += pho.getEnergy();
+                aida.cloud1D("Number of hits in Photon Cluster").fill(pho.getCalorimeterHits().size());
+                aida.cloud1D("Energy of Photon Cluster").fill(pho.getEnergy());
+                aida.cloud2D("Num hits vs E of Photons").fill(pho.getEnergy(),pho.getCalorimeterHits().size());
+            }
+            if (HMPhoclusters.size()==1) aida.cloud1D("Single Photon ESum").fill(PFAPhoE);
+            if (HMPhoclusters.size()>0) aida.cloud1D("Total Photon ESum").fill(PFAPhoE);
+        }
+    } // end of event loop
+
+    /**
+     * Returns the fraction of this cluster's summed raw hit energy deposited in
+     * each calorimeter layer (array indexed by layer, length {@code _nLayers}).
+     * A cluster with no hits or zero total raw energy yields an all-zero array
+     * instead of NaNs (previously division by 0.0 produced NaN fractions, which
+     * would poison the H-Matrix chi-squared downstream).
+     *
+     * @param clus cluster whose hits are histogrammed by layer
+     * @return per-layer energy fractions summing to 1 (or all zeros)
+     */
+    private double[] layerEnergies(BasicCluster clus)
+    {
+        //FIXME could reuse this array
+        double[] layerEnergies = new double[_nLayers];
+        double clusterEnergy = 0.;
+        List<CalorimeterHit> hits = clus.getCalorimeterHits();
+        for(CalorimeterHit hit : hits)
+        {
+            IDDecoder _decoder = hit.getIDDecoder();
+            _decoder.setID(hit.getCellID());
+            double e = hit.getRawEnergy();
+            // NOTE(review): assumes the decoded layer index is < _nLayers (EM
+            // barrel); a hit from another subdetector would throw
+            // ArrayIndexOutOfBoundsException — confirm input clusters are EM only.
+            int layer = _decoder.getLayer();
+            clusterEnergy += e;
+            layerEnergies[layer] += e;
+        }
+        // Normalize to fractions only when there is energy to divide by.
+        if (clusterEnergy > 0.)
+        {
+            for(int i=0; i<layerEnergies.length; ++i)
+            {
+                layerEnergies[i] /= clusterEnergy;
+            }
+        }
+        //FIXME sum of cell energies does not equal cluster energy!
+        return layerEnergies;
+    }
+
+    /** Sets the name of the input EM cluster collection read from the event. */
+    public void setInputClusterName(String clname)
+    {
+        _clusname = clname;
+    }
+    /** Sets the event collection name under which photon clusters are written. */
+    public void setOutputClusterName(String oclname)
+    {
+        _oclname = oclname;
+    }
+
+}
+