Commit in lcsim/src/org/lcsim/contrib/SteveKuhlmann on MAIN
BTrMipClusterID.java   +1185   added 1.1
This is the PFA including ClusterID, but with HMatrix commented out for now.

lcsim/src/org/lcsim/contrib/SteveKuhlmann
BTrMipClusterID.java added at 1.1
diff -N BTrMipClusterID.java
--- /dev/null	1 Jan 1970 00:00:00 -0000
+++ BTrMipClusterID.java	17 Aug 2005 17:20:57 -0000	1.1
@@ -0,0 +1,1185 @@
+//  package org.lcsim.contrib.compile.SteveMagill;
+package org.lcsim.contrib.SteveKuhlmann;   // package declaration matching the directory in the commit header
+
+//  Overall PFA algorithm includes 1) Track IL-finder in Barrel CAL, creation of EM Mip
+//  Clusters, HAD Mip Clusters, CAL Mip Clusters; 2) Nearest-Neighbor clustering of remaining
+//  hits, EM NN Clusters and HAD NN Clusters; 3) Photon ID of EM NN Clusters, creation of
+//  EM Photon Clusters, EM Shower Clusters; 4) Association of EM and HAD Mip Clusters, EM Shower
+//  Clusters, and HAD NN Shower Clusters to form Track-CAL Associated Shower Clusters
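+//  In this driver the four stages run sequentially in process(EventHeader): mip clustering,
+//  nearest-neighbor clustering, photon ID, then track-shower association.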
+// import java.util.List;
+import java.util.*;
+import hep.physics.vec.Hep3Vector;
+import org.lcsim.event.*;
+import org.lcsim.event.EventHeader.LCMetaData;
+import org.lcsim.geometry.CalorimeterIDDecoder;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+import hep.aida.ITree;
+import org.lcsim.geometry.*;
+import org.lcsim.geometry.subdetector.CylindricalCalorimeter;
+import org.lcsim.recon.cluster.util.*;
+import org.lcsim.recon.cluster.nn.NearestNeighborCluster;
+import org.lcsim.recon.emid.hmatrix.HMatrix;
+import org.lcsim.recon.emid.hmatrix.HMatrixTask;
+import org.lcsim.recon.emid.hmatrix.HMatrixBuilder;
+import org.lcsim.recon.emid.hmatrix.HMatrixConditionsConverter;
+import org.lcsim.math.chisq.ChisqProb;
+import cjnn.backprop.*;
+
+
+public class BTrMipClusterID extends Driver
+{
+   private AIDA aida = AIDA.defaultInstance();
+   private ITree _tree;
+   private boolean _initialized = false;
+//    The number of layers in the EM Calorimeter Barrel
+   private int _numbemlayers;  
+//    The number of layers in the Hadronic Calorimeter Barrel
+   private  int _numbhadlayers;
+//    The EM Barrel sampling fraction
+   private  double  _sfEMB;  
+//    The HAD Barrel sampling fraction
+   private  double _sfHB;
+   private double _neupi;
+//    The EM calorimeter hits, by layer
+   private List[] _emBLayerHits;
+//    The HAD calorimeter hits, by layer
+   private List[] _hadBLayerHits; 
+//    The radii of the barrel calorimeter layers
+   private double[] _emBRadii;
+   private double[] _hadBRadii;
+//    modified Barrel helix swimmer for org.lcsim test
+   private Helix[] _emBSwimmers; //EM
+   private Helix[] _hadBSwimmers; //HAD
+//   Z extent of the central barrel calorimeters.
+   private double _embZ; //EM
+   private double _hadbZ; //HAD
+//   The central magnetic field strength
+   private double[] _fieldStrength;
+   
+    private HMatrixTask _task;
+    private int _nLayers;
+    CalorimeterIDDecoder _decoder;
+    private HMatrixBuilder _hmb;
+    private HMatrix _hmx;
+    private boolean debug = false;
+    private boolean trhist = true;
+    private boolean phohist = true;
+    private boolean readytotryClusterID = true;
+
+    // the number of variables in the measurement vector
+    private int _nmeas;
+    // the index of the cluster width variables
+    private int _widthIndex;
+    // we handle the energy dependence by correlating to the log of the energy
+    private static final double _log10inv = 1./Math.log(10.0);
+    // the index of the cluster energy (log base 10)
+    private int _logEIndex;
+    // the vector of measured values
+    double[] _vals;
+    
+    NetworkBasic myNet;
+    double[] probs = new double[4];
+
+
+    public BTrMipClusterID()
+    {
+        this(new NetworkBasic());
+         //FIXME: This should depend on detector
+         myNet.setResourceStream("/hep/lcd/detector/sdmnocalgaps/nn/");
+         myNet.configureNetwork("network.net");
+         // Reading the weights file isn't necessary when weights.min
+         // is specified in the network.net file.
+         myNet.readWeights("weights.min");
+         // Read Standard file
+         // Do not do this if data was not rescaled during training.
+         myNet.readStandardFile();
+
+//         this(HMatrixTask.ANALYZE);
+    }
+    
+//    public BTrMipClusterID(HMatrixTask task)
+//    {
+//        _tree = aida.tree();
+//        _task = task;
+//        
+//        if(task==HMatrixTask.ANALYZE)
+//        {
+//            // The HMatrix could possibly change, so be sensitive to this.
+//            getConditionsManager().registerConditionsConverter(new HMatrixConditionsConverter());
+//        }
+//    } 
+    public BTrMipClusterID(NetworkBasic net)
+    {
+       myNet = net;
+    }
+
+   protected void process(EventHeader event)
+   {
+      super.process(event);
+//      System.out.println(events);
+
+//  Initialize things here
+    if(!_initialized)
+    {
+        System.out.println("Detector is "+event.getDetectorName());
+        Detector det = event.getDetector();
+        double[] zero = {0, 0, 0};
+        _fieldStrength = det.getFieldMap().getField(zero);
+        System.out.println("B Field " +_fieldStrength[2]+ " Tesla");
+        
+        CylindricalCalorimeter emb = ((CylindricalCalorimeter) det.getSubdetectors().get("EMBarrel"));
+        _embZ = emb.getZMin();
+        System.out.println("EM Barrel z " +_embZ);
+//        _numbemlayers = 30;
+        _numbemlayers = emb.getLayering().getLayerCount();
+        System.out.println("EM Barrel Layers " +_numbemlayers);
+        _emBLayerHits = new ArrayList[_numbemlayers]; 
+        _emBRadii = new double[_numbemlayers];
+        _emBSwimmers = new Helix[_numbemlayers];
+        for (int i=0; i<_numbemlayers; ++i)
+        {
+            _emBLayerHits[i] = new ArrayList();
+            _emBRadii[i]=emb.getLayering().getDistanceToLayerSensorMid(i);
+            System.out.println("EM Barrel Layer Number " +i+ " EM Barrel Radius " +_emBRadii[i]);
+            _emBSwimmers[i] = new Helix(_fieldStrength[2], _emBRadii[i], Math.abs(_embZ));
+            _emBSwimmers[i].setCurvatureFlip(true);
+        }   // end loop over em layers
+
+        CylindricalCalorimeter hadb = ((CylindricalCalorimeter) det.getSubdetectors().get("HADBarrel"));       
+        _hadbZ = hadb.getZMin();
+        System.out.println("HAD Barrel z " +_hadbZ);
+        _numbhadlayers = hadb.getLayering().getLayerCount();
+        System.out.println("HAD Barrel Layers " +_numbhadlayers);   
+        _hadBLayerHits = new ArrayList[_numbhadlayers];
+        _hadBRadii = new double[_numbhadlayers];
+        _hadBSwimmers = new Helix[_numbhadlayers];
+        for (int i=0; i<_numbhadlayers; ++i)
+        {
+        	_hadBLayerHits[i] = new ArrayList(); 
+         	_hadBRadii[i]=hadb.getLayering().getDistanceToLayerSensorMid(i);
+                System.out.println("HAD Barrel Layer Number " +i+ " HAD Barrel Radius " +_hadBRadii[i]);
+        	_hadBSwimmers[i] = new Helix(_fieldStrength[2], _hadBRadii[i], Math.abs(_hadbZ));
+        	_hadBSwimmers[i].setCurvatureFlip(true);
+        }  // end loop over had layers
+        
+        // Get (set) sampling fractions here
+        _sfEMB = 0.0120;    //  for now this is tested sampling fraction for W/Si EMCAL in SiD Detector
+//        _sfEMB = 0.0117;     //  for now, this is effective sf for 20/10 Si/W ECAL from 2 GeV gammas
+//        _sfHB = 0.000025;   //  for now this is tested sampling fraction for SS/RPC HCAL in SiD Detector
+        //  but for now, use calibration as a sampling fraction (then HAD E in a cluster is OK)
+//        _sfHB = 5.3;    // 200 MeV per hit in HCAL for SiD SSRPC, 10.0 for pions
+        _sfHB = 26.0;   // 38 MeV per hit in HCAL for CDC WScin after 1/3 mip threshold cut, 30 for pions
+        _neupi = 0.87;   // ratio of nhits neu to pi - .87 for CDC, .57 for SiD
+//        _sfHB = 0.06;  // old SDJan03 SS/Scin analog HCAL sf
+
+          CylindricalCalorimeter calsub = (CylindricalCalorimeter)event.getDetector().getSubdetectors().get("EMBarrel");
+            _nLayers = calsub.getLayering().getLayerCount();
+            System.out.println("found "+_nLayers+" layers in the EMBarrel");
+            // the vector of measurements starts as the longitudinal layers
+            _nmeas = _nLayers;
+            // add the logarithm of the energy
+            _logEIndex = _nmeas;
+            _nmeas+=1;
+            // would add any additional measurements (e.g. width) here
+            _vals = new double[_nmeas];
+            
+            //FIXME key needs to be better defined
+            int key = 0;
+            if(_task==HMatrixTask.ANALYZE)
+            {
+                //FIXME need to fetch name of HMatrix file to use from a conditions file
+                System.out.println("Looking for HMatrix...");
+                //_hmx = getConditionsManager().getCachedConditions(HMatrix.class, 
+                //        "LongitudinalHMatrix.hmx").getCachedData();
+                String fileName = "c:/lcsim/LongitudinalHMatrix.hmx";
+                _hmx = HMatrix.read(fileName);
+            }   
+        _initialized = true;
+    }   // end of initialization section
+      
+      // get calorimeter hits by layer - need to calculate hit density for cells
+      double EMBESum=0;  double EMBESumC=0;  double HADBESum=0;
+      double EEMlowE=0;  double EEMhighE=0;
+      double ESumBTotCAL=0;
+      double hthrshld = 0;
+      double htime = 0;
+      int nhitsBTotCAL=0;
+      double[][] celbtheta = new double[5000][_numbemlayers+_numbhadlayers];
+      double[][] celbphi = new double[5000][_numbemlayers+_numbhadlayers];
+      double[][] celbden = new double[5000][_numbemlayers+_numbhadlayers];
+      long[][] celbid = new long[5000][_numbemlayers+_numbhadlayers];
+      int[] nhitbLayer = new int[_numbemlayers+_numbhadlayers];
+      int nhitsEMB=0; int nhitsHADB=0;
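+      //  NOTE: the per-layer buffers above are hard-capped at 5000 hits per layer;
+      //  a layer with more hits than that would index past the end of these arrays.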
+      
+      // FIXME should get calorimeterhit collection names from a conditions file
+        List<CalorimeterHit> collection = event.get(CalorimeterHit.class,"EcalBarrHits");
+        _decoder = (CalorimeterIDDecoder) event.getMetaData(collection).getIDDecoder();
+
+
+      // this uses SimCalorimeterHit, but need to know name of collection for this. 
+      List<SimCalorimeterHit> embHits = event.getSimCalorimeterHits("EcalBarrHits");
+      CalorimeterIDDecoder embdecoder = (CalorimeterIDDecoder) event.getMetaData(embHits).getIDDecoder();
+      // create a map of cells keyed on their index
+      Map<Long, CalorimeterHit> embhitmap = new HashMap<Long, CalorimeterHit>();
+      
+      for (SimCalorimeterHit embhit : embHits)
+      {
+          long hitID = embhit.getCellID();
+          embdecoder.setID(hitID);
+          int emblayer = embdecoder.getLayer();
+          //System.out.println("EM layer num = " + emblayer);
+          celbtheta[nhitbLayer[emblayer]][emblayer] = embdecoder.getTheta();				  
+          celbphi[nhitbLayer[emblayer]][emblayer] = embdecoder.getPhi();
+          celbid[nhitbLayer[emblayer]][emblayer] = hitID;
+          nhitbLayer[emblayer]++;
+          embhitmap.put(hitID, embhit);
+          EMBESum += embhit.getRawEnergy();
+          //  for CDC, use this for calibrated B EM ESum
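+          //  layers >= 20 are weighted with half the sampling fraction, so their visible
+          //  energy counts double (presumably a thicker-absorber rear ECAL section)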
+          if (emblayer < 20)
+          {
+              EMBESumC += embhit.getRawEnergy()/_sfEMB;
+              EEMlowE += embhit.getRawEnergy();
+          }
+          else
+          {
+              EMBESumC += embhit.getRawEnergy()/(_sfEMB/2);
+              EEMhighE += embhit.getRawEnergy();
+          }
+          nhitsEMB++;    
+      }
+      if(trhist) aida.cloud1D("Nhits in original EMBhitmap").fill(embhitmap.size());
+      
+      List<SimCalorimeterHit> hadbHits = event.getSimCalorimeterHits("HcalBarrHits");
+      CalorimeterIDDecoder hadbdecoder = (CalorimeterIDDecoder) event.getMetaData(hadbHits).getIDDecoder();
+      // create a map of cells keyed on their index
+      Map<Long, CalorimeterHit> hadbhitmap = new HashMap<Long, CalorimeterHit>();
+
+      if (event.getDetectorName().equals("cdcaug05"))
+      {
+          hthrshld = 0.0005;   // 1/3 mip threshold for cdc Scin HCAL
+          htime = 100;  // 100 ns cut on hit time
+//      System.out.println("HAD Threshold = " +hthrshld);
+      }
+      for (SimCalorimeterHit hadbhit : hadbHits)
+      {
+          long hitID = hadbhit.getCellID();
+          hadbdecoder.setID(hitID);
+          int hadblayer = hadbdecoder.getLayer();
+          if(trhist) aida.cloud1D("HAD Cell Vis E before thr cut").fill(hadbhit.getRawEnergy());
+          //aida.cloud1D("HAD Cell Time before timing cut").fill(hadbhit.getTime());
+        // if (hadbhit.getRawEnergy() > hthrshld && hadbhit.getTime() < 100)
+          if (hadbhit.getRawEnergy() > hthrshld)
+          {
+              if(trhist) aida.cloud1D("HAD Cell Vis E after thr cut").fill(hadbhit.getRawEnergy());
+              celbtheta[nhitbLayer[_numbemlayers+hadblayer]][_numbemlayers+hadblayer] = hadbdecoder.getTheta();				  
+              celbphi[nhitbLayer[_numbemlayers+hadblayer]][_numbemlayers+hadblayer] = hadbdecoder.getPhi();
+              celbid[nhitbLayer[_numbemlayers+hadblayer]][_numbemlayers+hadblayer] = hitID;
+              nhitbLayer[_numbemlayers+hadblayer]++; 
+              hadbhitmap.put(hitID, hadbhit);
+              HADBESum += hadbhit.getRawEnergy();
+              nhitsHADB++;
+          }
+      }
+      if(trhist) aida.cloud1D("Nhits in original HADBhitmap").fill(hadbhitmap.size());
+
+      for (int i=0; i<_numbemlayers+_numbhadlayers; ++i)
+      {
+          if(trhist) aida.cloud2D("NHits vs layer").fill(i,nhitbLayer[i]);
+           // calculate density for each cell hit in layer - use 5X5 window?
+           for (int j=0; j<nhitbLayer[i]; ++j)
+           {
+               celbden[j][i] = 1;
+               for (int k=0; k<nhitbLayer[i]; ++k)
+               {
+                   if (k == j) continue;
+                   //  in ECAL, for 5 mm X 5 mm cells, each cell is ~ .00315 X .00315 rad
+                   //  .0075 corresponds to 5X5 without outermost 4 corners, 0.005 is 3X3
+                   //  in HCAL, for 1 X 1 cm cells, each cell is ~ .00525 X .00525 rad
+                   //  .018 is 7X7 without corners (3 cells missing at each corner)
+                   double dmine = 0.005;
+ //                  double dminh = 0.0125;
+                   double dminh = 3*dmine;
+                   double dmin = dmine;
+                   double deltheta = Math.abs(celbtheta[k][i]-celbtheta[j][i]);
+                   if(trhist) aida.cloud1D("Delta Theta Cell").fill(deltheta);
+                   double delphi = Math.abs(celbphi[k][i]-celbphi[j][i]);
+                   if (delphi > Math.PI) delphi = 2*Math.PI-delphi;
+                   if(trhist) aida.cloud1D("Delta Phi Cell").fill(delphi);
+                   double distance = Math.sqrt(deltheta*deltheta+delphi*delphi);
+                   if (i >= _numbemlayers) dmin = dminh;
+                   if (distance < dmin)
+                   {
+                       if (i < 30 && trhist) aida.cloud1D("Distance between EM CAL hits").fill(distance);
+                       if (i > 29 && trhist) aida.cloud1D("Distance between HAD CAL hits").fill(distance);
+                       if (i < 30) celbden[j][i] += 0.00315/distance;
+                       if (i > 29) celbden[j][i] += 0.00525/distance;
+                   }
+               }
+               if (nhitbLayer[i] > 0 && i < 30 && trhist) aida.cloud1D("EM CAL cell density").fill(celbden[j][i]);
+               if (nhitbLayer[i] > 0 && i > 29 && trhist) aida.cloud1D("HAD CAL cell density").fill(celbden[j][i]);
+               if (nhitbLayer[i] > 0 && trhist) aida.cloud2D("CAL cell density vs Layer").fill(i,celbden[j][i]);
+               if (i == 0 && trhist) aida.cloud1D("Cell Density layer 0").fill(celbden[j][i]);
+           }     
+      }
+      ESumBTotCAL = EMBESum + HADBESum;
+      nhitsBTotCAL = nhitsEMB + nhitsHADB;
+      if(trhist) aida.cloud2D("NHits EM B vs EM B Calib ESum").fill(EMBESumC,nhitsEMB);
+      if(trhist) aida.cloud2D("NHits HAD B vs HAD B Vis ESum").fill(HADBESum,nhitsHADB);
+//      aida.cloud1D("Pho E first 20").fill(EEMlowE);
+//      aida.cloud1D("Pho E last 10").fill(EEMhighE);
+      if (EMBESumC < 0.40)
+      {
+          if(trhist) aida.cloud1D("EM B Calib ESum, mips only").fill(EMBESumC);
+          if(trhist) aida.cloud1D("NHits EM B, mips only").fill(nhitsEMB); 
+          if(trhist) aida.cloud1D("NHits HAD B, EM B mips only").fill(nhitsHADB);
+          if(trhist) aida.cloud1D("HAD B ESum, EM mips only").fill(nhitsHADB/_sfHB);
+      }
+      if (nhitsEMB < 1 && nhitsHADB > 0)
+      {
+          if(trhist) aida.cloud1D("NHits HAD B, no EM B hits").fill(nhitsHADB);
+          if(trhist) aida.cloud1D("HAD B Vis ESum, no EM B Hits").fill(HADBESum);
+          if(trhist) aida.cloud1D("HAD B Cal ESum, no EM B Hits").fill(nhitsHADB/_sfHB);
+      }
+      if (nhitsHADB < 1 && nhitsEMB > 0)
+      {
+          if(trhist) aida.cloud1D("NHits EM B, no HAD B hits").fill(nhitsEMB);
+      }
+      if(trhist) aida.cloud1D("Total B CAL NHits").fill(nhitsBTotCAL);     
+      if(trhist) aida.cloud1D("Total B CAL Vis ESum").fill(ESumBTotCAL);
+      if(trhist) aida.cloud1D("Total B CAL EM Analog + HAD Digital ESum").fill(EMBESumC+nhitsHADB/_sfHB);
+      if(trhist) aida.cloud1D("Total EM B Vis ESum").fill(EMBESum);
+      if(trhist) aida.cloud1D("Total EM B Calib ESum").fill(EMBESumC);
+      if(trhist) aida.cloud1D("Total HAD B Vis ESum").fill(HADBESum);
+
+      //  Make a new collection that only includes had hits above threshold - for Event display
+      List<CalorimeterHit> hlist = new ArrayList(hadbhitmap.values());
+      LCMetaData meta = event.getMetaData(hadbHits);
+      event.put("ThrHcalBHits",hlist,CalorimeterHit.class,meta.getFlags(),meta.getName());
+      
+      
+      /** This section finds mip clusters in cal attached to a reconstructed (or true) track.
+       *  The starting point is the first layer of the ECAL and the endpoint is the point
+       *  of first interaction in the CAL.  This end point can be used to start Track/CAL
+       *  object association later.
+       *    1) get list of reconstructed tracks or true MC charged particles (determined by
+       *       the user)
+       *    2) loop over all tracks, swim to layer, check for mip hit.
+       *    3) continue until either a) no hit in next layer, or b) large energy or density of hits
+       *       is found in next layer.
+       *    4) list of hits forms a cluster. 
+      **/
+      //  Here is the track list - either use cheater or rec tracks
+//      List<List<FoundTrack>> trackCollections = event.get(FoundTrack.class);
+//      List<Track> chptrks = event.getTracks(); 
+      
+      //  First, this is for true tracks - MC charged final states
+      int ntrkswim = 0;
+      double[][] trbtheta = new double[200][_numbemlayers+_numbhadlayers];
+      double[][] trbphi = new double[200][_numbemlayers+_numbhadlayers];
+      double[] trbpE = new double[200];
+      double[] trbpp = new double[200];
+      double[] trbpct = new double[200];
+      double[] trbppt = new double[200];
+      List<MCParticle> mcps = event.getMCParticles();
+      for (MCParticle mcp : mcps)
+      {
+          //  must be final state particle
+          if (mcp.getGeneratorStatus() != mcp.FINAL_STATE) continue;
+//          if (mcp.getGeneratorStatus() == mcp.DOCUMENTATION || mcp.getGeneratorStatus() == 0) continue; 
+//          System.out.println("Generator Status " +mcp.getGeneratorStatus());
+          // must be charged particle
+          int iq = (int)mcp.getCharge();
+          if (iq == 0) continue;
+          Hep3Vector origin = mcp.getOrigin();
+          Hep3Vector momentum = mcp.getMomentum();
+          double chpE = mcp.getEnergy();
+          double chpmom = Math.sqrt(mcp.getPX()*mcp.getPX()+mcp.getPY()*mcp.getPY()+mcp.getPZ()*mcp.getPZ());
+          double chpct = mcp.getPZ()/chpmom;
+          double chppt = Math.sqrt(mcp.getPX()*mcp.getPX()+mcp.getPY()*mcp.getPY());
+          if(trhist) aida.cloud1D("All Ch Particle pT").fill(chppt);
+          //  cut on barrel for now - later add endcaps
+          if (Math.abs(chpct)<0.80)
+          {
+          if(trhist) aida.cloud1D("Ch Particle pT in Barrel").fill(chppt);
+          //  only check tracks that make it into CAL
+          //  0.9525 GeV to get to inner radius of ECAL, 0.975 GeV to get to shower max of ECAL, 1.04 GeV
+          //  to get to inner radius of HCAL, 1.75 GeV to get to outer radius of HCAL
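+          //  (roughly pT[GeV] > 0.15*B[T]*R[m], the minimum pT for a helix from the origin to
+          //  reach radius R; e.g. 0.15*5*1.27 ~ 0.95 GeV for a ~1.27 m ECAL inner radius at 5 T)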
+          if (chppt>0.9525)
+          {
+              if(trhist) aida.cloud1D("Ch Particle pT for swimming").fill(chppt);
+              //    swim through the EM Barrel calorimeter
+	      for(int i=0; i<_numbemlayers; ++i)
+              {
+                  _emBSwimmers[i].swim(momentum, origin, iq);
+                  double[] intersection = _emBSwimmers[i].getIntersect();
+		  double phi = Math.atan2(intersection[1],intersection[0]);
+		  if(phi<0) phi+=2*Math.PI;
+                  if(trhist) aida.cloud1D("Track Phi").fill(phi);
+		  double theta = Math.atan(_emBRadii[i]/intersection[2]);
+		  if(theta<0) theta+=Math.PI;
+                  if(trhist) aida.cloud1D("Track Theta").fill(theta);
+		  trbtheta[ntrkswim][i] = theta;
+		  trbphi[ntrkswim][i] = phi;
+              }
+              //  now swim through HAD Barrel Calorimeter
+	      for(int i=0; i<_numbhadlayers; ++i)
+              {
+                  _hadBSwimmers[i].swim(momentum, origin, iq);
+                  double[] intersection = _hadBSwimmers[i].getIntersect();
+		  double phi = Math.atan2(intersection[1],intersection[0]);
+		  if(phi<0) phi+=2*Math.PI;
+		  double theta = Math.atan(_hadBRadii[i]/intersection[2]);
+		  if(theta<0) theta+=Math.PI;
+		  trbtheta[ntrkswim][i+_numbemlayers] = theta;
+		  trbphi[ntrkswim][i+_numbemlayers] = phi;
+              }
+              trbpp[ntrkswim] = chpmom;
+              trbpE[ntrkswim] = chpE;
+              trbpct[ntrkswim] = chpct;
+              trbppt[ntrkswim] = chppt;
+              ntrkswim++;
+          }
+          } else if (Math.abs(chpct)>0.8 && Math.abs(chpct)<0.993)
+          {
+              // endcap tracks
+              if(trhist) aida.cloud1D("End Cap Track pT").fill(chppt);
+          } else
+          {
+              // beampipe tracks
+              if(trhist) aida.cloud1D("Beampipe Track pT").fill(chppt);
+          }
+      }
+      if(trhist) aida.cloud1D("Number of swum Tracks").fill(ntrkswim);
+      
+      //  Now, can find interaction layer by comparing track position with
+      //  CAL hits - first loop over all barrel tracks
+      List<BasicCluster> embmipclusters = new ArrayList();
+      List<BasicCluster> hadbmipclusters = new ArrayList();
+      List<BasicCluster> calbmipclusters = new ArrayList();
+      List<Integer> itracks = new ArrayList();
+      //  map track to mip clusters
+      Map<Integer, BasicCluster> trmipclusmap = new HashMap<Integer, BasicCluster>();
+      Map<Integer, BasicCluster> tremipmap = new HashMap<Integer, BasicCluster>();
+      Map<Integer, BasicCluster> trhmipmap = new HashMap<Integer, BasicCluster>();
+      int[] IntLay = new int[200];
+      int[] nhitMipClus = new int[200];
+      long[][] hitidMipClus = new long[200][1000];
+      for (int i=0; i<ntrkswim; ++i)
+      {
+          itracks.add(i);
+          // map integer into cluster to link tracks and mips
+          Map<Long, CalorimeterHit> ebclhitmap = new HashMap<Long, CalorimeterHit>();
+          Map<Long, CalorimeterHit> hbclhitmap = new HashMap<Long, CalorimeterHit>();
+          BasicCluster ebmclus = new BasicCluster();
+          BasicCluster hbmclus = new BasicCluster();
+          BasicCluster bmipclus = new BasicCluster();
+          boolean nointeraction = true;
+          IntLay[i] = _numbemlayers+_numbhadlayers;     // set to maximum for each track to start
+          //  now, loop over all Barrel layers
+          for (int j=0; j<_numbemlayers+_numbhadlayers; ++j)
+          {
+              //  continue if no interaction yet, if already have interaction, skip
+              if (nointeraction)
+              {
+                  double prden = 0;
+                  int ntcmtch = 0;
+                  long[] layerids = new long[200];
+                  // loop over all hits in this layer
+                  for (int k=0; k<nhitbLayer[j]; ++k)
+                  {
+                      // here is some diagnostic stuff - only need for layer 0, some for first 5 layers
+                      if (j == 0)
+                      {
+                          if(trhist) aida.cloud1D("DIAG - CAL cell theta").fill(celbtheta[k][j]);
+                          if(trhist) aida.cloud1D("DIAG - CAL cell cos theta").fill(Math.cos(celbtheta[k][j]));
+                          if(trhist) aida.cloud1D("DIAG - CAL cell phi").fill(celbphi[k][j]);
+                          if(trhist) aida.cloud2D("DIAG - CAL cell Theta vs Phi").fill(celbphi[k][j],celbtheta[k][j]);
+                          if(trhist) aida.cloud1D("DIAG - Track theta").fill(trbtheta[i][j]);
+                          if(trhist) aida.cloud1D("DIAG - Track cos theta").fill(Math.cos(trbtheta[i][j]));
+                          if(trhist) aida.cloud1D("DIAG - Track phi").fill(trbphi[i][j]);
+                          if(trhist) aida.cloud2D("DIAG - Track Theta vs Phi").fill(trbphi[i][j],trbtheta[i][j]);
+                      }
+                      if (j == 1 && trhist) aida.cloud2D("DIAG - CAL cell Theta vs Phi 1").fill(celbphi[k][j],celbtheta[k][j]);
+                      if (j == 2 && trhist) aida.cloud2D("DIAG - CAL cell Theta vs Phi 2").fill(celbphi[k][j],celbtheta[k][j]);
+                      if (j == 3 && trhist) aida.cloud2D("DIAG - CAL cell Theta vs Phi 3").fill(celbphi[k][j],celbtheta[k][j]);
+                      if (j == 4 && trhist) aida.cloud2D("DIAG - CAL cell Theta vs Phi 4").fill(celbphi[k][j],celbtheta[k][j]);
+
+                      //  get cell-track distances and test for matches
+                      double trcedmin = 0.0075;          // eventually make this an argument for the mip cluster code - make min .0075 for ECAL and 2*min for HCAL
+                      if (j >= _numbemlayers) trcedmin = 2*trcedmin;    
+                      double dtheta = Math.abs(celbtheta[k][j] - trbtheta[i][j]);
+                      double dphi = Math.abs(celbphi[k][j] - trbphi[i][j]);
+                      if (dphi > Math.PI) dphi = 2*Math.PI-dphi;
+                      dphi = Math.abs(dphi);
+                      if(trhist) aida.cloud1D("DIAG - delta theta track cell").fill(dtheta);
+                      if(trhist) aida.cloud1D("DIAG - delta phi track cell").fill(dphi);
+                      double trcedist = Math.sqrt(dtheta*dtheta+dphi*dphi);
+                      if (j == 0 && trhist) aida.cloud1D("DIAG - Track-Cell Distance in Layer 0").fill(trcedist);
+                      if (j < 30 && trhist) aida.cloud1D("DIAG - EM Track-Cell Distance").fill(trcedist);
+                      if (j > 29 && trhist) aida.cloud1D("DIAG - HAD Track-Cell Distance").fill(trcedist);
+                      if (trcedist < 2*trcedmin && trhist) aida.cloud1D("Near Track-Cell Distance").fill(trcedist);
+                      if (trcedist < trcedmin)
+                      {
+                          if (j < _numbemlayers && trhist) aida.cloud1D("DIAG - Matched Track-Cell Distance EM").fill(trcedist);
+                          if (j >= _numbemlayers && trhist) aida.cloud1D("DIAG - Matched Track-Cell Distance HAD").fill(trcedist);
+                          prden += celbden[k][j];
+                          layerids[ntcmtch] = celbid[k][j];
+                          ntcmtch++;
+                      }
+                  }
+                  if(trhist) aida.cloud1D("Density Sum per layer").fill(prden);
+                  if (ntcmtch > 0) prden = prden/ntcmtch;
+                  //  should probably make this an argument - the density sum limit
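+                  //  an interaction is declared when either no cell matched the track in this
+                  //  layer (prden == 0) or the mean density of the matched cells exceeds the cut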
+                  if (prden == 0 || prden > 2.5)
+                  {
+                      // have interaction - form mip cluster from hits in cluster hitmap
+                      nointeraction = false;
+                      IntLay[i] = j;
+                      if(trhist) aida.cloud1D("Cell Density Sum for interactions").fill(prden);
+//                      if (j == 0 && trhist) aida.cloud1D("Cell Density for matches in layer 0").fill(prden);
+//                      if (j == 0 && prden == 0 && trhist) aida.cloud1D("Track pT for matches in Layer 0").fill(trbppt[i]);
+//                      if (j == 0 && prden == 0 && trhist) aida.cloud1D("Track theta for matches in Layer 0").fill(trbtheta[i][j]);
+                      if (j == 0) continue;    // if interaction in first layer, just get out
+                      if (j < _numbemlayers)
+                      {
+                          // only have EM cluster
+                          if(trhist) aida.cloud1D("DIAG - Nhits in EMClus hitmap for match").fill(ebclhitmap.size());
+                          if (ebclhitmap.size() > 0)
+                          {
+                              for (CalorimeterHit h : ebclhitmap.values())
+                              {
+                                  if (h == null) continue;
+                                  ebmclus.addHit(h);   //  add hit to EM B Mip cluster
+                                  bmipclus.addHit(h);  //  add hit to CAL B Mip Cluster
+                              }
+                          }
+                          embmipclusters.add(ebmclus);
+                          calbmipclusters.add(bmipclus);
+                      } else
+                      {
+                          // can have both EM and HAD clusters
+                          if (ebclhitmap.size() > 0)
+                          {
+                              for (CalorimeterHit eh : ebclhitmap.values())
+                              {
+                                  if (eh == null) continue;
+                                  ebmclus.addHit(eh);
+                                  bmipclus.addHit(eh);
+                              }
+                          }
+                          embmipclusters.add(ebmclus);
+                          if (j != _numbemlayers)   //  don't make hadmipcluster if interaction in first layer of HCAL
+                          {
+                              if (hbclhitmap.size() > 0)
+                              {
+                                  for (CalorimeterHit hh : hbclhitmap.values())
+                                  {
+                                      if (hh == null) continue;
+                                      hbmclus.addHit(hh);
+                                      bmipclus.addHit(hh);
+                                  }
+                              }
+                              hadbmipclusters.add(hbmclus);
+                          }
+                          calbmipclusters.add(bmipclus);                          
+                      }
+                      trmipclusmap.put(i, bmipclus);
+                      tremipmap.put(i,ebmclus);
+                      trhmipmap.put(i,hbmclus);
+                  } else
+                  {
+                      // add hits to temporary mip cluster, remove hits from hitmap
+                      if(trhist) aida.cloud1D("Cell Density for Noninteractions").fill(prden);
+                      for (int l=0; l<ntcmtch; ++l)
+                      {
+                          hitidMipClus[i][nhitMipClus[i]] = layerids[l];
+                          nhitMipClus[i]++;
+                          if (j < _numbemlayers) 
+                          {
+                              CalorimeterHit t = embhitmap.get(layerids[l]);
+                              ebclhitmap.put(layerids[l],t);
+                              embhitmap.remove(layerids[l]);
+                          }
+                          if (j >= _numbemlayers) 
+                          {
+                              CalorimeterHit t = hadbhitmap.get(layerids[l]);
+                              hbclhitmap.put(layerids[l],t);
+                              hadbhitmap.remove(layerids[l]);
+                          }
+                      }
+                  }
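+                  //  j == 63 is the last barrel layer index for this geometry; a track that gets
+                  //  this far with no interaction has all its matched hits made into mip clusters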
+                  if (j == 63 && nointeraction)
+                  {
+                      if (ebclhitmap.size() > 0)
+                      {
+                          for (CalorimeterHit eh : ebclhitmap.values())
+                          {
+                              ebmclus.addHit(eh);
+                              bmipclus.addHit(eh);
+                          }
+                      }
+                      embmipclusters.add(ebmclus);
+                      if (hbclhitmap.size() > 0)
+                      {
+                          for (CalorimeterHit hh : hbclhitmap.values())
+                          {
+                              hbmclus.addHit(hh);
+                              bmipclus.addHit(hh);
+                          }
+                      }
+                      hadbmipclusters.add(hbmclus);
+                      calbmipclusters.add(bmipclus);                          
+                      trmipclusmap.put(i,bmipclus);
+                      tremipmap.put(i,ebmclus);
+                      trhmipmap.put(i,hbmclus);
+                  }
+              }              
+          }
+      }
+      //  Name the mip clusters and add to event
+      if (embmipclusters.size() > 0) event.put("EBMipClusters",embmipclusters);
+      if (hadbmipclusters.size() > 0) event.put("HBMipClusters",hadbmipclusters);
+      if (calbmipclusters.size() > 0) event.put("CALBMipClusters",calbmipclusters);
+      //  Done with mip cluster finding
+      
+      // some histograms
+      if(trhist) aida.cloud1D("Nhits in EMBhitmap after MipClus").fill(embhitmap.size());
+      if(trhist) aida.cloud1D("Nhits in HADBhitmap after MipClus").fill(hadbhitmap.size());
+      if(trhist) aida.cloud1D("Num EM B Mip Clusters").fill(embmipclusters.size());
+      if(trhist) aida.cloud1D("Num HAD B Mip Clusters").fill(hadbmipclusters.size());
+      if(trhist) aida.cloud1D("Num CAL B Mip Clusters").fill(calbmipclusters.size());
+      
+      for (BasicCluster embmipcluster : embmipclusters)
+      {
+          if(trhist) aida.cloud1D("EM B Mip Cluster E").fill(embmipcluster.getRawEnergy()/_sfEMB);
+          if(trhist) aida.cloud1D("EM B Mip Cluster NHits").fill(embmipcluster.getCalorimeterHits().size());
+      }
+      for (BasicCluster hadbmipcluster : hadbmipclusters)
+      {
+          if(trhist) aida.cloud1D("HAD B Mip Cluster NHits").fill(hadbmipcluster.getCalorimeterHits().size());
+      }
+      
+      //  make a map linking hits to mip clusters
+      Map<CalorimeterHit, BasicCluster> calmiphitclus = new HashMap<CalorimeterHit, BasicCluster>();
+      for (BasicCluster calbmipcluster : calbmipclusters)
+      {
+          if(trhist) aida.cloud1D("CAL B Mip Cluster NHits").fill(calbmipcluster.getCalorimeterHits().size());
+          for (int i=0; i<calbmipcluster.getCalorimeterHits().size(); ++i)
+          {
+              CalorimeterHit cmiph = calbmipcluster.getCalorimeterHits().get(i);
+              calmiphitclus.put(cmiph,calbmipcluster);
+          }
+      }
+      
+      for (int i=0; i<ntrkswim; ++i)
+      {
+          if(trhist) aida.cloud1D("Number of mip hits per Track").fill(nhitMipClus[i]);
+          if(trhist) aida.cloud1D("CAL Interaction Layer per Track").fill(IntLay[i]);
+          if(trhist) aida.cloud2D("Number of mip hits vs Int Layer").fill(IntLay[i],nhitMipClus[i]);
+//          System.out.println("Track " +i+ " IL " +IntLay[i]+ " Energy " +trbpE[i]+ " Momentum " +trbpp[i]+ " Phi " +trbphi[i][0]);
+      }
+      //  End of Track-Mip section - break here eventually
+      
+      //  Now, go on to Track-Shower matching using endpoint of matched Track-Mips
+      //  first, cluster remaining hits with nearest neighbor clusterer
+      //  e.g., 1,1,0,1 gives nn clusters in each layer with min hits of 1
+      //  first, make a copy of hitmaps to send to NN clusterer (deletes all hits)
+      Map<Long, CalorimeterHit> ebnnhitmap = new HashMap<Long, CalorimeterHit>();
+      ebnnhitmap.putAll(embhitmap);
+      Map<Long, CalorimeterHit> hbnnhitmap = new HashMap<Long, CalorimeterHit>();
+      hbnnhitmap.putAll(hadbhitmap);
+      
+      int _dU = 2;
+      int _dV = 2;
+      int _dLayer = 2;
+      int _minNcells = 4;
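+      //  (_dU, _dV, _dLayer) set the cell/layer neighborhood used by NearestNeighborCluster;
+      //  clusters with _minNcells hits or fewer are discarded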
+      // get EM clusters
+      List<BasicCluster> ebclusters = new ArrayList();
+      for(;;)
+      {
+         if (ebnnhitmap.isEmpty()) break;
+         Long k = ebnnhitmap.keySet().iterator().next();
+         CalorimeterHit emhit = ebnnhitmap.get(k);
+         // start a cluster, constructor will aggregate remaining hits into itself
+         NearestNeighborCluster nnclus = new NearestNeighborCluster(embdecoder, ebnnhitmap, emhit, k, _dU, _dV, _dLayer);
+         if(nnclus.getCalorimeterHits().size()>_minNcells)
+         {
+            ebclusters.add(nnclus);
+         }
+      }
+//      String name = event.getMetaData(collection).getName();
+      if (ebclusters.size() > 0) event.put("EMBNNClusters",ebclusters);
+      for (BasicCluster ebcluster : ebclusters)
+      {
+          for (int i=0; i<ebcluster.getCalorimeterHits().size(); ++i)
+          {
+              // remove cluster hits from EM B hitmap
+              CalorimeterHit ebh = ebcluster.getCalorimeterHits().get(i);
+              long ebhid = ebh.getCellID();
+              embhitmap.remove(ebhid);
+          }
+      }
+      if(trhist) aida.cloud1D("Nhits in EMBhitmap after NN clustering").fill(embhitmap.size());
+      //might be able to ID photons with these clusters? - see below
+      
+      _dU = 4;
+      _dV = 4;
+      _dLayer = 4;
+      _minNcells = 2;
+      // get had clusters
+      List<BasicCluster> hbclusters = new ArrayList();
+      for (;;)
+      {
+          if (hbnnhitmap.isEmpty()) break;
+          Long k = hbnnhitmap.keySet().iterator().next();
+          CalorimeterHit hadhit = hbnnhitmap.get(k);
+          NearestNeighborCluster nnclus = new NearestNeighborCluster(hadbdecoder, hbnnhitmap, hadhit, k, _dU, _dV, _dLayer);
+//          aida.cloud1D("Num of hits in had cluster").fill(nnclus.getCalorimeterHits().size());
+          if (nnclus.getCalorimeterHits().size()>_minNcells)
+          {
+              hbclusters.add(nnclus);
+//              aida.cloud1D("Num of hits in had cluster").fill(nnclus.getCalorimeterHits().size());
+          }
+      }
+
+      //  map hits in cluster to their cluster for combining later
+      Map<CalorimeterHit, BasicCluster> hbnnhitclus = new HashMap<CalorimeterHit, BasicCluster>();
+      for (BasicCluster hbcluster : hbclusters)
+      {
+          for (int i=0; i<hbcluster.getCalorimeterHits().size(); ++i)
+          {
+              // remove cluster hits from HAD B hitmap
+              CalorimeterHit hbh = hbcluster.getCalorimeterHits().get(i);
+              long hbhid = hbh.getCellID();
+              hadbhitmap.remove(hbhid);
+              // fill cluster-hit map
+              hbnnhitclus.put(hbh,hbcluster);
+          }
+      }
+      if(trhist) aida.cloud1D("Nhits in HADBhitmap after NN clustering").fill(hadbhitmap.size());
+      if(trhist) aida.cloud1D("Num HAD B NNClusters").fill(hbclusters.size());
+      for (BasicCluster hbcluster : hbclusters)
+      {
+          if(trhist) aida.cloud1D("Num of hits in HAD B NNCluster").fill(hbcluster.getCalorimeterHits().size());
+          if(trhist) aida.cloud1D("Vis Energy of HAD B NNCluster").fill(hbcluster.getEnergy());
+          if(trhist) aida.cloud2D("VisE Clus vs Nhits in HAD B NN Cluster").fill(hbcluster.getCalorimeterHits().size(),hbcluster.getEnergy());
+      }
+      
+      // PHOTON-Finder here (use this quick-and-dirty one for now)
+      //  check these clusters and try to ID photons - look for track-cluster matches at shower max
+      //  require that EM cluster has hits at this layer.
+      if(trhist) aida.cloud1D("Num EM B NNClusters").fill(ebclusters.size());
+      List<BasicCluster> phoclusters = new ArrayList();
+      List<BasicCluster> ebshowclusters = new ArrayList();
+      int nGoodClusters = 0;
+      for (BasicCluster ebcluster : ebclusters)
+      {
+          boolean badphoton = true;
+        // Get theta, phi for cluster
+              double ecp[] = ebcluster.getPosition();
+              double ecpx = ecp[0];
+              double ecpy = ecp[1];
+              double ecpz = ecp[2];
+              double ecR = Math.sqrt(ecpx*ecpx+ecpy*ecpy);
+              double ebclth = Math.atan(ecR/ecpz);
+              if (ebclth<0) ebclth+=Math.PI;
+              double ebclph = Math.atan2(ecpy,ecpx);
+              if (ebclph<0) ebclph+=2*Math.PI;
+
+        //  Now, check Track-NNCluster distance for match at track swim layer 8 (shower max?)
+          double trcldelth=0;  double trcldelph=0;  double trcldist=0;
+          int ntrclmtch=0;
+          for (int i=0; i<ntrkswim; ++i)
+          {
+              trcldelth = Math.abs(ebclth-trbtheta[i][7]);
+              if(trhist) aida.cloud1D("Cluster-Track theta diff").fill(trcldelth);
+              trcldelph = Math.abs(ebclph-trbphi[i][7]);
+              if (trcldelph > Math.PI) trcldelph = 2*Math.PI-trcldelph;
+              if(trhist) aida.cloud1D("Cluster-Track phi diff").fill(trcldelph);
+              trcldist = Math.sqrt(trcldelth*trcldelth+trcldelph*trcldelph);
+              if(trhist) aida.cloud1D("Track-Cluster Distance").fill(trcldist);
+              if (trcldist<0.03) ntrclmtch++;
+          }
+          if (ntrclmtch == 0) {
+            // First do calculations with layer energies and longitudinal energy fractions...
+
+            int firstL=-99, lastL=0;
+            double aveLE5=0.;
+            double aveHits;
+            double[] layerE = layerEnergies(ebcluster);
+            for(int i=0; i<layerE.length; ++i)
+              {
+                _vals[i] = layerE[i];
+                if(layerE[i]>0 && firstL==-99) firstL=i;
+                if(layerE[i]>0 && i>lastL) lastL=i;
+                if(phohist) aida.cloud2D("Fractional Energy vs Layer").fill(i,layerE[i]);
+              }
+            int length=lastL-firstL+1;  
+            double firstDdiff = (firstL + 1)*1.0/length;// *1.0 to force int to double
+            int maxL=firstL+5;
+            if(maxL>30) maxL=30;
+            for(int i=firstL; i<maxL; i++) aveLE5 += layerE[i];
+            aveLE5 = aveLE5/5.;
+            int ncells = ebcluster.getCalorimeterHits().size();
+            aveHits = ncells*1.0/length;   // *1.0 to force int to double, as for firstDdiff above
+
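+            //  firstL/lastL: first/last layer with energy; length: longitudinal extent in layers;
+            //  aveLE5: mean layer energy over the first five layers; aveHits: hits per layer.
+            //  These overwrite entries of the cluster-ID net input array below
+            //  (input[5] through input[9] and input[11]).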
+            if(ncells>10)
+            {
+                nGoodClusters++;
+                double energy = ebcluster.getRawEnergy();
+                if(phohist) aida.cloud1D("Number of cells in cluster").fill(ncells);
+                if(phohist) aida.cloud1D("energy").fill(energy);
+                // Add the correlation to the log of the cluster energy
+                //We want the common logarithm (log 10) of the energy
+                _vals[_logEIndex]=Math.log(energy)*_log10inv;
+                // have now filled the vector of measurements. need to either accumulate the HMatrix or apply it
+//                if (_task==HMatrixTask.ANALYZE)
+//                {
+//		    if(phohist) aida.cloud1D("nmeas").fill(_nmeas);
+//                    double chisq = _hmx.chisquared(_vals);
+//                    if(phohist) aida.cloud1D("Chisq").fill(chisq);
+//                    if(phohist) aida.cloud2D("Chisq vs energy").fill(energy,chisq);
+//                    if(phohist) aida.cloud1D("Chisq Probability").fill(ChisqProb.gammq(_nmeas,chisq));
+//                    double chisqD = _hmx.chisquaredDiagonal(_vals);
+//                    if(phohist) aida.cloud1D("ChisqD").fill(chisqD);
+//                    if(phohist) aida.cloud2D("ChisqD vs energy").fill(energy,chisqD);                    
+//                    double chiprobD = ChisqProb.gammq(_nmeas,chisqD);
+//                    if(chiprobD<0.0000000001) chiprobD=0.0000000001;
+//                    if(phohist) aida.cloud1D("ChisqD Probability").fill(chiprobD);
+//                    double logchiprobD = Math.log(chiprobD)*_log10inv;
+//                    if(phohist) aida.cloud1D("Log ChisqD Probability").fill(logchiprobD);
+//                    
+//                    //System.out.println("chisqD "+chisqD+" prob "+ChisqProb.gammq(_nmeas,chisqD));
+//                    
+//		    double chisqND = chisq - chisqD;
+//		    if(phohist) aida.cloud1D("ChisqND").fill(chisqND);
+//                    if (logchiprobD > -9.5)
+//                    {
+//                      phoclusters.add(ebcluster);
+//                      badphoton = false;
+//                    } 
+//                }
+                if(readytotryClusterID) {
+                  MakeClusterIDInputLCSIM make = new MakeClusterIDInputLCSIM();
+                  double[] input;
+                  input = make.getInput( ebcluster ); 
+                  input[5] = firstL;
+                  input[6] = lastL;
+                  input[7] = length;
+                  input[8] = firstDdiff;
+                  input[9] = aveLE5;
+                  input[11] = aveHits;
+                  double[] prob; // array to hold probabilities, output by the NN
+                  prob = myNet.ffOutStandard(input);
+                  double sum = 0;
+                    for ( int i = 0; i < 6 ; i++ )
+                    {
+                        sum = sum + prob[i];
+                    }
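+                    // normalize the six raw network outputs so they can be read as class
+                    // probabilities; probs[3] below merges outputs 3 and 4 into one class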
+          
+                    double max = 0;
+                    int nmax = 0;
+                    for ( int i = 0; i<6 ; i++ )
+                    {
+                     if ( prob[i] > max )
+                        {
+                            max = prob[i];
+                            nmax = i;
+                        }
+                     }
+                    max = max/sum;
+                  probs[0] = prob[0]/sum;
+                  probs[1] = prob[1]/sum;
+                  probs[2] = prob[2]/sum;
+                  probs[3] = Math.max(prob[3]/sum,prob[4]/sum);
+                  System.out.println(probs[0]+" "+probs[1]+" "+probs[2]+" "+probs[3]);
+                }
+
+            }
+          }
+          if(badphoton) ebshowclusters.add(ebcluster);
+      }  // end loop over photon candidates
+      
+      if(phohist) aida.cloud1D("number of found photon candidates (above hits threshold)").fill(nGoodClusters);
+      if (phoclusters.size() > 0) event.put("PhoBClusters",phoclusters);
+      
+      double PhoClusE = 0;
+      if(phohist) aida.cloud1D("Num Photon B Clusters").fill(phoclusters.size());
+      for (BasicCluster phocluster : phoclusters)
+      {
+          PhoClusE += phocluster.getEnergy();
+      }
+      if(phohist) aida.cloud1D("Photon Cluster E").fill(PhoClusE);
+                
+      //  make a map linking hits to EBShower clusters
+      Map<CalorimeterHit, BasicCluster> ebshowhitclus = new HashMap<CalorimeterHit, BasicCluster>();
+      for (BasicCluster ebshowcluster : ebshowclusters)
+      {
+          for (int i=0; i<ebshowcluster.getCalorimeterHits().size(); ++i)
+          {
+              // get CAL hits and put in map
+              CalorimeterHit ebsh = ebshowcluster.getCalorimeterHits().get(i);
+              ebshowhitclus.put(ebsh,ebshowcluster);
+          }          
+      }
+
+      //  Now that mips and photons are found, try to link Mip clusters, EBShower Clusters and HADBNN
+      //  clusters with tracks to form the complete charged hadron shower.
+      //  Start with the tracks
+      List<BasicCluster> trkcalclusters = new ArrayList();
+      List<BasicCluster> neucalclusters = new ArrayList();
+      Map<Integer, BasicCluster> trcaclusmap = new HashMap<Integer, BasicCluster>();
+      Map<Integer, BasicCluster> treshmap = new HashMap<Integer, BasicCluster>();
+      Map<Integer, BasicCluster> trhshmap = new HashMap<Integer, BasicCluster>();
+      
+//      for (int i=0; i<ntrkswim; ++i)
+//      System.out.println("Number of Tracks in List = " + itracks.size());
+      for (Integer i : itracks)
+      {
+          BasicCluster tcclus = new BasicCluster();
+          double trmipE = 0;
+          double treshE = 0;
+          double trhshE = 0;
+          double trtotE = 0;
+          int niter = 1;
+//          if(trhist) aida.cloud1D("Track Momentum").fill(trbpp[i]);
+          //  first, check for track-cell match with a mip cluster at last mip layer (IntLay[i]-1)
+//          System.out.println("IL = " + IntLay[i]);
+          if (IntLay[i] == _numbemlayers+_numbhadlayers)
+          {
+              //  don't do anything - all mips - no E/P test
+              tcclus.addCluster(trmipclusmap.get(i));
+          } else if (IntLay[i] < _numbemlayers+_numbhadlayers)
+          {
+              if (IntLay[i] > 0)
+              {
+                  tcclus.addCluster(trmipclusmap.get(i));
+                  double emmipE = tremipmap.get(i).getRawEnergy();
+                  double hmipE = trhmipmap.get(i).getCalorimeterHits().size();
+                  trmipE += emmipE+hmipE;
+                  if(trhist) aida.cloud1D("Mip E per Track").fill(trmipE);
+                  if(trhist) aida.cloud1D("Track E over P after adding mips").fill(trmipE/trbpp[i]);
+              }
+              do {
+              for (Iterator<BasicCluster> ieclus = ebshowclusters.iterator(); ieclus.hasNext();)
+              {
+                  BasicCluster eclus = ieclus.next();
+                  List<CalorimeterHit> ebshowhits = eclus.getCalorimeterHits();
+                  int ntsh=0;
+                  double tsd=99;
+                  for (CalorimeterHit ebshowhit : ebshowhits)
+                  {
+                      // compare hit-track distance
+                      double[] shhpos = ebshowhit.getPosition();
+                      double shR = Math.sqrt(shhpos[0]*shhpos[0]+shhpos[1]*shhpos[1]);
+                      double shhth = Math.atan(shR/shhpos[2]);
+                      if(shhth<0) shhth+=Math.PI;
+                      double shhph = Math.atan2(shhpos[1],shhpos[0]);
+	              if(shhph<0) shhph+=2*Math.PI;
+                      double tsdelth = Math.abs(shhth-trbtheta[i][IntLay[i]]);
+                      double tsdelph = Math.abs(shhph-trbphi[i][IntLay[i]]);
+                      if (tsdelph > Math.PI) tsdelph = 2*Math.PI-tsdelph;
+                      tsd = Math.sqrt(tsdelth*tsdelth+tsdelph*tsdelph);
+                      if (tsd < 0.1 && trhist) aida.cloud1D("Track-Cell Distance in Track EM Shower Cluster").fill(tsd);
+                      if (tsd<niter*0.01) ntsh++;
+                  }
+                  if(trhist) aida.cloud1D("Number of cell matches per EM shower").fill(ntsh);
+                  if (ntsh > 0) 
+                  {
+                      tcclus.addCluster(eclus);
+                      treshE += eclus.getEnergy();
+                      if(trhist) aida.cloud1D("Track E over P after adding EM shower").fill((trmipE+treshE)/trbpp[i]);
+                      treshmap.put(i, eclus);
+                      ieclus.remove();
+                  }
+              }
+              for (Iterator<BasicCluster> ihclus = hbclusters.iterator(); ihclus.hasNext();)
[truncated at 1000 lines; 190 more skipped]
CVSspam 0.2.8