lcsim/src/org/lcsim/contrib/CosminDeaconu
diff -N CurlerKiller.java
--- /dev/null 1 Jan 1970 00:00:00 -0000
+++ CurlerKiller.java 30 Aug 2007 18:50:52 -0000 1.1
@@ -0,0 +1,1115 @@
+/*
+ * CurlerKiller.java
+ *
+ * Created on July 30, 2007, 3:05 PM
+ *
+ * To change this template, choose Tools | Template Manager
+ * and open the template in the editor.
+ */
+
+package org.lcsim.contrib.CosminDeaconu;
+
+import Jama.Matrix;
+import java.util.List;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Set;
+import java.util.TreeSet;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.MCParticle;
+import org.lcsim.event.base.BaseTrackerHitMC;
+import org.lcsim.fit.line.SlopeInterceptLineFit;
+import org.lcsim.fit.line.SlopeInterceptLineFitter;
+import org.lcsim.fit.polynomial.PolynomialFit;
+import org.lcsim.fit.polynomial.PolynomialFitter;
+import org.lcsim.geometry.subdetector.MultiLayerTracker;
+import org.lcsim.util.Driver;
+import java.util.ArrayList;
+import org.lcsim.event.TrackerHit;
+import org.lcsim.util.aida.AIDA;
+/**
+ * This driver takes as input a list of tracker hits and attempts to strip out
+ * all hits coming from curlers.
+ *
+ * It is divided as follows:
+ *
+ * CurlerKiller: This class implements the algorithm for finding curlers. It also cheats, using Monte Carlo truth, to evaluate how well curlers are found.
+ * Cluster: This class encapsulates a cluster and provides the machinery for comparing clusters against each other.
+ *
+ * Two small helper classes, HitPlusResidual and CompareResiduals, are used for sorting hits by residual size.
+ *
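+ * A minimal configuration sketch (the collection names below are hypothetical,
+ * chosen only for illustration):
+ * <pre>
+ *   CurlerKiller killer = new CurlerKiller();
+ *   killer.addInput("TrackerBarrelHits"); // hypothetical input hit collection
+ *   killer.setOutput("CurlerKilledHits"); // prefix for the output collections
+ *   killer.setMinHits(6);                 // minimum hits to form a cluster
+ *   add(killer);                          // register with an enclosing Driver
+ * </pre>
+ *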
+ * @author Cosmin Deaconu
+ */
+public class CurlerKiller extends Driver{
+
+ private ArrayList<TrackerHit> _hits = new ArrayList<TrackerHit>();
+
+ private String _output_name = "";
+    private String _output_curler = "";
+
+ private ArrayList<String> _input_collection = new ArrayList<String>();
+
+ private String _subdetector_name = "TrackerBarrel";
+ private int _nlayers = 5;
+ private double _max_phi_difference = 0.03; //for clustering purposes
+ private int _min_hits = 6; //to form a cluster
+ private double maxseparation = 300; //in z
+
+ private double dz = 100.0/Math.sqrt(12);
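+    //100/sqrt(12) is the RMS of a uniform distribution of width 100: the assumed
+    //z resolution for a readout segment of that pitch (units follow the hit positions, presumably mm)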
+ private double chisq_cut = 200; //The chisq for a cluster must be lower than this for the cluster to be considered
+    private double two_cluster_chisq_cut = 800; //The match chisq between a pair of clusters must be lower than this for them to be considered curlers
+ private double max_z_diff = 500; //max difference in average z between two clusters
+
+    private double max_residual = 500; //max residual of a hit from the fit; hits above this are dropped from the cluster
+ private double max_zrms = 10000; // for a cluster
+
+ private boolean discontinuous; //this is kind of clunky...
+
+ private boolean _histogram = true;
+
+ private AIDA _aida = AIDA.defaultInstance();
+
+ /** Creates a new instance of CurlerKiller */
+ public CurlerKiller() {
+
+ }
+
+
+ protected void process(EventHeader event)
+ {
+
+ //System.out.println("Curler Killer started");
+ getHits(event); //get the input hits
+
+
+ //map hits to their layers
+ ArrayList<TrackerHit>[] layermap = new ArrayList[_nlayers];
+
+ for (int i=0; i<layermap.length; i++)
+ {
+ layermap[i] = new ArrayList<TrackerHit>();
+ }
+
+ for (TrackerHit i : _hits)
+ {
+ int layer = ((BaseTrackerHitMC)i).getSimHits().get(0).getLayer();
+ layermap[layer].add(i);
+ }
+
+
+        //map MC particles to the number of hits they left in each layer, to find curlers by cheating
+
+ HashMap<MCParticle,Integer[]> hits_by_particle = new HashMap<MCParticle,Integer[]>();
+ HashMap<MCParticle,List<TrackerHit>> hits_from_mc = new HashMap<MCParticle,List<TrackerHit>>();
+
+ for (TrackerHit i : _hits)
+ {
+            List<MCParticle> theseparticles = ((BaseTrackerHitMC)i).mcParticles();
+
+ for (MCParticle part : theseparticles)
+ {
+ if (!hits_by_particle.containsKey(part))
+ {
+
+                    Integer[] hitsbylayer = new Integer[_nlayers];
+                    for (int n = 0; n<_nlayers; n++)
+                    {
+                        hitsbylayer[n] = 0;
+                    }
+
+ int layer = ((BaseTrackerHitMC)i).getSimHits().get(0).getLayer();
+                    hitsbylayer[layer]++;
+ hits_by_particle.put(part,hitsbylayer);
+
+ List<TrackerHit> thisparticlehits = new ArrayList<TrackerHit>();
+ thisparticlehits.add(i);
+
+ hits_from_mc.put(part,thisparticlehits);
+ }
+ else
+ {
+ int layer = ((BaseTrackerHitMC)i).getSimHits().get(0).getLayer();
+ Integer[] hitsbylayer = hits_by_particle.get(part);
+ hitsbylayer[layer]++;
+ hits_by_particle.put(part,hitsbylayer);
+
+ List<TrackerHit> thisparticlehits = hits_from_mc.get(part);
+ if (!thisparticlehits.contains(i)) thisparticlehits.add(i);
+ hits_from_mc.put(part,thisparticlehits);
+ }
+ }
+ }
+
+
+ ArrayList<MCParticle> badparticles = new ArrayList<MCParticle>(); //bad particles curl
+ int probably_unfindable = 0;
+
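+        //a particle is flagged as a curler when it left more hits than the number
+        //of distinct layers it hit, i.e. it crossed at least one layer more than once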
+ for (MCParticle p: hits_by_particle.keySet())
+ {
+ int howmany = howManyLayersHit(hits_by_particle,p);
+ if (_histogram) _aida.cloud1D("layers hit by MCParticle").fill(howmany);
+
+ if (hits_from_mc.get(p).size()>howmany)
+ {
+ badparticles.add(p);
+
+ if (maxHitsInALayer(hits_by_particle,p)<2*_min_hits) probably_unfindable++;
+ }
+
+
+ }
+
+
+ ArrayList<TrackerHit> badhits = new ArrayList<TrackerHit>(); //bad hits belong to bad particles
+
+ for (MCParticle i : badparticles)
+ {
+ for (TrackerHit t : hits_from_mc.get(i))
+ {
+ if (!badhits.contains(t)) badhits.add(t);
+ }
+ }
+
+ if (_histogram) _aida.cloud1D("badhits").fill(badhits.size());
+
+ ArrayList<TrackerHit> remove_us = new ArrayList<TrackerHit>();
+ ArrayList<Cluster> removed_clusters = new ArrayList<Cluster>();
+
+
+
+ //System.out.println("Started looping through layers");
+
+
+ //loop through each layer
+
+ int layer = 0;
+ for (ArrayList<TrackerHit> this_layer_hits : layermap)
+ {
+ //plot stuff
+ if (_histogram) _aida.cloud2D("hits in layer "+layer).reset();
+
+ for (TrackerHit hit : this_layer_hits)
+ {
+ if (_histogram) _aida.cloud2D("hits in layer "+layer).fill(Math.atan2(hit.getPosition()[1],hit.getPosition()[0]),hit.getPosition()[2]); //this will plot the hits in each layer
+ }
+
+ layer++;
+
+ //find clusters in phi
+ ArrayList<Cluster> clusters = findClusters(this_layer_hits);
+ //System.out.println("Cluster List Generated");
+
+ //loop through each cluster
+ for (Cluster cluster : clusters)
+ {
+ if (removed_clusters.contains(cluster)) continue;
+
+ //check if cluster has a good fit
+ if (cluster.chisq()<chisq_cut)
+ {
+ double min_chisq = 10000000;
+ Cluster pair = null;
+
+ //if it does, find the pair with the best chisq match...
+ for (Cluster second_cluster : clusters)
+ {
+ if (second_cluster==cluster) continue;
+ if (removed_clusters.contains(second_cluster)) continue;
+ if (second_cluster.chisq()>chisq_cut) continue;
+ if (Math.abs(second_cluster.z_avg()-cluster.z_avg())>max_z_diff) continue;
+
+ double this_chisq = cluster.getClusterMatchChisq(second_cluster);
+
+ if (this_chisq<min_chisq)
+ {
+ min_chisq = this_chisq;
+ pair = second_cluster;
+ }
+ }
+
+ // make sure that the match chisq is good enough...
+ if (min_chisq < two_cluster_chisq_cut)
+ {
+ //System.out.println("Type One Cut Made");
+ removed_clusters.add(cluster);
+ removed_clusters.add(pair);
+ remove_us.addAll(cluster.hits());
+ remove_us.addAll(pair.hits());
+ }
+
+ }
+
+                //check whether it's really one combined cluster (two clusters so close together in phi that they look like one)
+ if (!removed_clusters.contains(cluster) && cluster.getSelfMatchChisq()<two_cluster_chisq_cut)
+ {
+ //System.out.println("Type Two Cut Made");
+ removed_clusters.add(cluster);
+ remove_us.addAll(cluster.hits());
+ }
+
+ }// loop through clusters
+// System.out.println("Layer Complete");
+ } //loop through layers
+
+ //calculate stats
+ int good_removals = 0;
+
+
+ ArrayList<TrackerHit> false_positives = new ArrayList<TrackerHit>();
+
+ for (TrackerHit i : remove_us)
+ {
+ if (badhits.contains(i))
+ {
+ good_removals++;
+ if (_histogram) _aida.cloud1D("energy of good removals").fill(i.getdEdx());
+ }
+ else
+ {
+ false_positives.add(i);
+ if (_histogram) _aida.cloud1D("energy of bad removals").fill(i.getdEdx());
+ }
+ }
+
+ int bad_removals = false_positives.size();
+
+// System.out.println();
+// System.out.println("Stats: ");
+// System.out.println("Good Removals: " + good_removals);
+// System.out.println("Bad Removals: " + bad_removals);
+// System.out.println("Total estimated curler hits: " + badhits.size());
+// System.out.println("Total removed: " + remove_us.size());
+// System.out.println("Known to be unfindable (though more are than these): " + probably_unfindable);
+// System.out.println("Estimated Percentage of curler hits removed: " + ((double)good_removals)/((double)badhits.size()));
+//
+ if (_histogram)
+ {
+ _aida.cloud1D("good removals").fill(good_removals);
+ _aida.cloud1D("bad removals").fill(bad_removals);
+ _aida.cloud1D("total removals").fill(remove_us.size());
+ _aida.cloud1D("percentage of curler hits removed").fill((double)good_removals/((double)badhits.size()));
+ _aida.cloud1D("percentage of removals that are good").fill((double)good_removals/((double)remove_us.size()));
+ _aida.cloud1D("probably unfindable").fill(probably_unfindable);
+ }
+
+ //output remaining hits
+ ArrayList<TrackerHit> cheathits = (ArrayList<TrackerHit>)_hits.clone();
+ _hits.removeAll(remove_us);
+ cheathits.removeAll(badhits);
+
+ for (MCParticle i : badparticles)
+ {
+ if (_histogram) _aida.cloud1D("_part_"+i.getType().toString()).fill(1);
+
+ }
+
+        if (!_output_name.equals(""))
+ {
+ event.put(_output_name,_hits,TrackerHit.class,0);
+ event.put(_output_name+"_curlers",remove_us,TrackerHit.class,0);
+ event.put(_output_name+"_false_positives",false_positives,TrackerHit.class,0);
+ event.put(_output_name+"_cheat_curlers",badhits,TrackerHit.class,0);
+ event.put(_output_name+"_cheat",cheathits,TrackerHit.class,0);
+ }
+
+// System.out.println("Event Complete");
+// System.out.println("-----------------------------------------------------");
+ }
+
+ //-------------------Public controller functions---------------------//
+
+ public void addInput(String collection_name)
+ {_input_collection.add(collection_name);}
+
+ public void clearInput()
+ {_input_collection.clear();}
+
+ public void setOutput (String collection_name)
+ {_output_name = collection_name;}
+
+
+ public void setMaxPhiDifference(double mpd)
+ {_max_phi_difference=mpd;}
+
+ public void setMinHits(int mh)
+ {_min_hits=mh;}
+
+ public void setMaxZSeparation(double mzs)
+ {maxseparation=mzs;}
+
+ public void set_dz(double newdz)
+ {dz=newdz;}
+
+ public void setFitChisqCut(double fcc)
+ {chisq_cut=fcc;}
+
+ public void setTwoClusterChisqCut(double tccc)
+ {two_cluster_chisq_cut=tccc;}
+
+ public void setMaxZDiff(double mzd)
+ {max_z_diff = mzd;}
+
+ public void setMaxResid(double mr)
+ {max_residual=mr;}
+
+ public void setMaxZRMS(double mzrms)
+ {max_zrms = mzrms;}
+
+ public void makeHistograms (boolean mh)
+ {_histogram = mh;}
+
+ //-------------------Private Helper Functions-------------------------//
+
+    //this function makes clusters in phi. It could probably be sped up by sorting the hits in phi first instead of comparing each hit's phi with all others.
+ private ArrayList<Cluster> findClusters(ArrayList<TrackerHit> input)
+ {
+ ArrayList<TrackerHit> unclustered = new ArrayList<TrackerHit>(input);
+ ArrayList<Cluster> returnme = new ArrayList<Cluster>();
+
+ for (TrackerHit i : input)
+ {
+ if (!unclustered.contains(i)) continue;
+
+ ArrayList<TrackerHit> this_cluster = new ArrayList<TrackerHit>();
+ this_cluster.add(i);
+
+            discontinuous = false; //assume the cluster does not wrap across the phi = +/-pi boundary
+
+ while(true)
+ {
+ ArrayList<TrackerHit> near = nearbyHits(this_cluster,unclustered);
+ if (near.isEmpty()) break;
+ this_cluster.addAll(near);
+ unclustered.removeAll(near);
+ }
+
+ if (this_cluster.size()>_min_hits)
+ {
+ Cluster c = new Cluster(this_cluster,dz,maxseparation,discontinuous);
+
+ if (_histogram)_aida.cloud1D("zrms").fill(c.zrms());
+
+ c.purgeHits(max_residual,_min_hits);
+
+                if (c.hits().size()>_min_hits && c.zrms()<max_zrms) returnme.add(c); //the cluster can shrink in the z-separation and residual purges, so check the size again
+ }
+ }
+ return returnme;
+ }
+
+    //helps clustering
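+    //Example of the wrap-around case handled below: phi_seed = +3.13 and
+    //phi_hit = -3.14 differ by ~6.27 directly but by only ~0.013 once 2*pi is
+    //added, so with the default cut of 0.03 they land in the same cluster.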
+ private ArrayList<TrackerHit> nearbyHits(ArrayList<TrackerHit> cluster, ArrayList<TrackerHit> unclustered)
+ {
+ ArrayList<TrackerHit> nearby_hits = new ArrayList<TrackerHit>();
+
+ for (TrackerHit seed_hit : cluster)
+ {
+ for (TrackerHit hit : unclustered)
+ {
+ if (nearby_hits.contains(hit)) continue;
+ double phi_seed = Math.atan2(seed_hit.getPosition()[1],seed_hit.getPosition()[0]);
+ double phi_hit = Math.atan2(hit.getPosition()[1],hit.getPosition()[0]);
+
+ if (Math.abs(phi_seed-phi_hit) < _max_phi_difference)
+ {
+ nearby_hits.add(hit);
+ }
+ else if( (phi_hit>0 && phi_seed<0 && Math.abs(phi_seed + 2*Math.PI - phi_hit) < _max_phi_difference) ||
+ (phi_hit<0 && phi_seed>0 && Math.abs(phi_seed - phi_hit - 2*Math.PI) < _max_phi_difference) )
+ {
+ nearby_hits.add(hit);
+ discontinuous = true;
+ }
+ }
+ }
+ return nearby_hits;
+ }
+
+
+ private int howManyLayersHit(HashMap<MCParticle,Integer[]> h, MCParticle p)
+ {
+ int returnme = 0;
+
+ for (Integer i : h.get(p))
+ {
+ if (i>0) returnme++;
+ }
+
+ return returnme;
+ }
+
+ private int maxHitsInALayer(HashMap<MCParticle,Integer[]> h, MCParticle p)
+ {
+ int max_hits = -1;
+
+ for (Integer i : h.get(p))
+ {
+ if (i.intValue()>max_hits) max_hits = i.intValue();
+ }
+
+ return max_hits;
+
+ }
+
+
+ private void getHits(EventHeader event)
+ {
+ _hits.clear();
+        for (String s : _input_collection) _hits.addAll(event.get(TrackerHit.class,s));
+ }
+}
+
+//================================================================Cluster Class ===================================================================================//
+class Cluster
+ {
+ private ArrayList<TrackerHit> hits;
+ private double chisq=Double.POSITIVE_INFINITY;
+
+ private PolynomialFitter fitter = new PolynomialFitter();
+ private PolynomialFit fit = null;
+
+ private double zrms;
+ private HashMap<TrackerHit,Double> seps = new HashMap<TrackerHit,Double>();
+
+ private double[] phi;
+ private double[] z;
+ private double[] dz;
+
+ private double z_avg = 0;
+ private double z_min;
+ private double z_max;
+
+ private double phi_avg;
+ private double phi_min;
+ private double phi_max;
+
+ private AIDA aida = AIDA.defaultInstance();
+
+ private double[] params = new double[4];
+
+ private boolean fail = false;
+
+ private double chisq_change_threshold = 1.5;
+ private double chisq_absolute_threshold = 1;
+
+    private double scale_amount = 1.; //phi scale factor: 1 = radians (set to 1000 to work in milliradians)
+
+ private Matrix cv = null;
+ private Matrix CV = null;
+
+ private boolean _discontinuous;
+
+ private double z_sep_max_rel = 5.;
+ private double z_sep_avg = 1000000000;
+
+ public Cluster(ArrayList<TrackerHit> h, double _dz, double max_sep, boolean discontinuous)
+ {
+ hits = h;
+ _discontinuous = discontinuous;
+
+ makeSeparations();
+ requireSeparations(max_sep); //require points to be fairly close in z
+
+ if (hits.size()<4)
+ {
+ fail=true;
+// System.out.println("too few points in a cluster after z separation");
+ return;
+ }
+
+ phi = new double[hits.size()];
+ z = new double[hits.size()];
+ dz = new double[hits.size()];
+
+ for (int i = 0; i<hits.size(); i++)
+ {
+ dz[i] = _dz;
+ z[i] = hits.get(i).getPosition()[2];
+            phi[i] = scale_amount*Math.atan2(hits.get(i).getPosition()[1],hits.get(i).getPosition()[0]); //apply phi scale factor
+ }
+
+ // fit
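+        //here "order" counts polynomial coefficients rather than the degree:
+        //order 2 is a line, order 4 a cubic (params has length 4). The order is
+        //raised until the chisq is good enough or stops improving appreciably.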
+ int order = 2;
+ chisq = 1000000000000.;
+ double lastchisq = 1000000000000000.;
+ double[] lastparams = null;
+ while (order<5)
+ {
+ lastparams = params;
+ lastchisq = chisq;
+
+ boolean fitok = false;
+
+ try
+ {
+ fitok = fitter.fit(phi,z,dz,hits.size(),order);
+ }
+ catch (RuntimeException re) //eww, singular matrices
+ {
+ fail = true;
+ }
+
+
+ if (!fitok)
+ {
+ System.out.println("Warning, polyfit failed");
+ fail = true;
+ return;
+ }
+
+
+ fit = fitter.getFit();
+// System.out.println(fit.parameters());
+ for (int i = 0; i<order; i++) params[i]=fit.parameters().getColumnPackedCopy()[i];
+ for (int i = order; i<4; i++) params[i]=0;
+
+ chisq = calculateChisq();
+
+// System.out.println("chisq with order "+order+" = "+chisq);
+
+ if (chisq < chisq_absolute_threshold)
+ {
+// System.out.println("Using order "+order);
+ break;
+ }
+
+ if (lastchisq < chisq_change_threshold*chisq)
+ {
+// System.out.println("Using order "+(order-1));
+ params = lastparams;
+ chisq = lastchisq;
+ break;
+ }
+
+
+ order++;
+ }
+
+// System.out.println("Coords: ");
+// for (int i = 0; i<hits.size(); i++) System.out.println("z: "+z[i]+" phi: "+phi[i]);
+// System.out.println("Parameters: ");
+// for (double i : params) System.out.println(i);
+// System.out.println("Covariance: ");
+// System.out.println(fit.covariance().toString());
+
+
+        //calculate mins, maxes, and averages for z and phi
+ double total = 0;
+ for (double i : z) total = total+i;
+ z_avg = total/((double)z.length);
+
+ phi_min = Double.POSITIVE_INFINITY;
+ phi_max = Double.NEGATIVE_INFINITY;
+
+ for (double i : phi)
+ {
+ if (!_discontinuous)
+ {
+ if (i>phi_max) phi_max = i;
+ if (i<phi_min) phi_min = i;
+ }
+ else
+ {
+ if (i<0 && i>phi_max) phi_max = i;
+ if (i>0 && i<phi_min) phi_min = i;
+ }
+ }
+
+ total = 0;
+ for (double i : phi)
+ {
+ total += i;
+ if (_discontinuous && i<0) total+= 2*Math.PI;
+ }
+
+
+ phi_avg = total/((double)phi.length);
+
+ if (phi_avg > Math.PI) phi_avg -= 2*Math.PI;
+
+ //System.out.println(hits.size());
+
+ zrms = calculateZRMS(); //z_min and max are calculated here
+
+
+ // plotFit();
+ }
+
+ public double z_avg()
+ {return z_avg;}
+
+ public double z_min()
+ {return z_min;}
+
+ public double z_max()
+ {return z_max;}
+
+ public double phi_avg()
+ {return phi_avg;}
+
+ public double phi_min()
+ {return phi_min;}
+
+ public double phi_max()
+ {return phi_max;}
+
+ public double[] params()
+ {return params;}
+
+ public PolynomialFit getFit()
+ {return fit;}
+
+ public double chisq()
+ {return chisq;}
+
+ public double zrms()
+ {return zrms;}
+
+ public ArrayList<TrackerHit> hits()
+ {return hits;}
+
+ public boolean fail()
+ {return fail;}
+
+    public double getClusterMatchChisq(Cluster cl) //this is the key function: it tests whether two clusters match, i.e. look like the two halves of one curler
+ {
+ if (fail || cl.fail()) return Double.POSITIVE_INFINITY;
+
+// if (fit.covariance().rank()!=cl.getFit().covariance().rank())
+// {
+// System.out.println("Incompatible fits.");
+// return Double.POSITIVE_INFINITY; //require two clusters to have same type of fit
+// }
+
+ double phi_0 = (phi_avg + cl.phi_avg())/2.0;
+
+ double[] diffs = new double[4];
+
+        //lower case ==> belongs to this cluster. Upper case ==> belongs to cl.
+
+ double a=params[3];
+ double b=params[2];
+ double c=params[1];
+ double d=params[0];
+
+ double A=cl.params()[3];
+ double B=cl.params()[2];
+ double C=cl.params()[1];
+ double D=cl.params()[0];
+
+        //polynomial differences for fits up to a cubic... to go past a cubic, these expansions would have to be extended
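+        //the subtracted terms are the coefficients of this cluster's cubic
+        //reflected about phi_0, i.e. the expansion of z(2*phi_0 - phi) in powers
+        //of phi: a curler's two halves should be mirror images in phi, so for a
+        //true pair all four differences approach zero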
+ diffs[0] = -(a)-A;
+ diffs[1] = (b+6*a*phi_0)-B;
+ diffs[2] = -(12*a*phi_0*phi_0+4*b*phi_0+c)-C;
+ diffs[3] = (8*a*(phi_0*phi_0*phi_0)+4*b*(phi_0*phi_0)+2*c*phi_0+d)-D;
+
+// System.out.println();
+
+ double[] errors_squared = new double[4];
+ cv = fit.covariance();
+ CV = cl.getFit().covariance();
+
+// System.out.println("==============================");
+// System.out.println(cv);
+// System.out.println();
+// System.out.println(CV);
+//
+ //this error propagation works for polynomials up to the cubic term. In the future, it might be a good idea
+ //to not just hardcode this stuff.
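+        //Each term below is linear error propagation, var(sum_i k_i*p_i) =
+        //sum_ij k_i*k_j*cov(p_i,p_j), applied to the reflected coefficients above.
+        //The covariance matrix is assumed to be packed like fit.parameters():
+        //index 0 = constant term, ..., index 3 = cubic coefficient.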
+        errors_squared[0] = covar(3,3) + Covar(3,3);
+
+        errors_squared[1] = covar(2,2) + Covar(2,2) +
+                Math.pow(6*phi_0,2) * (covar(3,3)) +
+                (6*phi_0) * (2*covar(2,3));
+
+        errors_squared[2] = covar(1,1) + Covar(1,1) +
+                Math.pow(12*phi_0*phi_0,2) * covar(3,3) +
+                12*Math.pow(phi_0,2)*(2*covar(3,1) ) +
+                Math.pow(4*phi_0,2) * (covar(2,2)) +
+                (4*phi_0) * (2*covar(1,2)) +
+                48*Math.pow(phi_0,3) * (2*covar(2,3));
+
+        errors_squared[3] = covar(0,0) + Covar(0,0) +
+                Math.pow( 8*Math.pow(phi_0,3), 2) * (covar(3,3)) +
+                (8*Math.pow(phi_0,3)) * (2*covar(3,0)) +
+                Math.pow(4*phi_0*phi_0,2) * ( covar(2,2)) +
+                4*Math.pow(phi_0,2)*(2*covar(2,0)) +
+                Math.pow(2*phi_0,2) * (covar(1,1)) +
+                (2*phi_0) * (2*covar(1,0)) +
+                32*Math.pow(phi_0,5) * (2*covar(3,2)) +
+                16*Math.pow(phi_0,4) * (2*covar(3,1)) +
+                8*Math.pow(phi_0,3) * (2*covar(2,1));
+
+//
+// System.out.println();
+// System.out.println("error terms:");
+// for (double doub : errors_squared) System.out.println(doub);
+
+        //get chisq value (expected difference = 0); skip terms whose propagated
+        //variance is zero (coefficients fixed at zero in low-order fits) to avoid 0/0
+        double returnme = 0;
+        for (int i = 0; i<diffs.length; i++)
+        {
+            if (errors_squared[i]>0) returnme+=(diffs[i]*diffs[i])/(errors_squared[i]);
+        }
+
+// aida.cloud1D("match chisq").fill(returnme);
+// System.out.println("chisq: "+returnme);
+// System.out.println();
+ return returnme;
+ }
+
+    //for bound checking: entries beyond the fitted matrix dimension are treated as zero
+    private double covar(int a, int b)
+    {
+        if(a<cv.getRowDimension() && b<cv.getColumnDimension())
+        {return cv.get(a,b);}
+
+        return 0;
+    }
+
+    //ditto
+    private double Covar(int a, int b)
+    {
+        if (a<CV.getRowDimension() && b<CV.getColumnDimension())
+        {return CV.get(a,b);}
+
+        return 0;
+    }
+
+ public double getSelfMatchChisq() //splits a cluster in two and compares each half with the other
+ {
+ if (fail) return Double.POSITIVE_INFINITY;
+
+ ArrayList<TrackerHit> c1_hits = new ArrayList<TrackerHit>();
+ ArrayList<TrackerHit> c2_hits = new ArrayList<TrackerHit>();
+
+ for (TrackerHit i : hits) //split cluster into two around phi_avg
+ {
+ double p = scale_amount*Math.atan2(i.getPosition()[1],i.getPosition()[0]);
+
+ if (!_discontinuous && p < phi_avg) c1_hits.add(i);
+
+ else if (_discontinuous && p<0 && phi_avg<0 && p<phi_avg) c1_hits.add(i);
+ else if (_discontinuous && p>0 && phi_avg<0) c1_hits.add(i);
+ else if (_discontinuous && p>0 && phi_avg>0 && p<phi_avg) c1_hits.add(i);
+
+ else c2_hits.add(i);
+ }
+
+// System.out.println("cluster split "+z_avg);
+
+ Cluster c1 = null;
+ Cluster c2 = null;
+
+ if (!_discontinuous)
+ {
+ c1 = new Cluster(c1_hits,dz[0],100000,false);
+ c2 = new Cluster(c2_hits,dz[0],100000,false);
+ }
+
+ else if (_discontinuous && phi_avg>0)
+ {
+ c1 = new Cluster(c1_hits,dz[0],100000,false);
+ c2 = new Cluster(c2_hits,dz[0],100000,true);
+ }
+
+ else
+ {
+ c1 = new Cluster(c1_hits,dz[0],100000,true);
+ c2 = new Cluster(c2_hits,dz[0],100000,false);
+ }
+ return c1.getClusterMatchChisq(c2);
+ }
+
+//    public double getClusterMatchAreaSquared(Cluster cl) //experimental. probably doesn't work.
+// {
+// if (fail || cl.fail()) return Double.POSITIVE_INFINITY;
+//
+// double upper_bound = cl.phi_max();
+// double lower_bound = cl.phi_min();
+//
+// //double returnme = Math.abs( evaluateRidiculousIntegral(upper_bound,cl) - evaluateRidiculousIntegral(lower_bound,cl) );
+// double returnme = Math.abs(approximateRidiculousIntegral(cl,100));
+//
+// aida.cloud1D("match area").fill(returnme);
+// System.out.println("area: "+returnme);
+//
+// return returnme;
+//
+// }
+//
+//
+//    public double getSelfMatchAreaSquared() //experimental. probably doesn't work.
+// {
+// if (fail) return Double.POSITIVE_INFINITY;
+//
+// ArrayList<TrackerHit> c1_hits = new ArrayList<TrackerHit>();
+// ArrayList<TrackerHit> c2_hits = new ArrayList<TrackerHit>();
+//
+// for (TrackerHit i : hits) //split cluster into two around phi_avg
+// {
+// if (scale_amount*Math.atan2(i.getPosition()[1],i.getPosition()[0]) > phi_avg) c1_hits.add(i);
+// else c2_hits.add(i);
+// }
+//
+// System.out.println("cluster split "+z_avg);
+//
+// Cluster c1 = new Cluster(c1_hits,dz[0],100000);
+// Cluster c2 = new Cluster(c2_hits,dz[0],100000);
+//
+//
+// return c1.getClusterMatchAreaSquared(c2);
+//
+// }
+//
+    public void purgeHits(double max_residual,int min_points) //removes hits whose residuals exceed max_residual, stopping before the cluster shrinks below min_points
+ {
+ if (hits.size() <= min_points) return;
+
+ //first, we want to sort in decreasing order by the size of the residual
+ ArrayList<HitPlusResidual> hpr = new ArrayList<HitPlusResidual>();
+ for (TrackerHit hit : hits)
+ {
+ hpr.add(new HitPlusResidual(hit,calculateResidual(hit)));
+ }
+
+ CompareResiduals<HitPlusResidual> comp = new CompareResiduals<HitPlusResidual>();
+ Collections.sort(hpr,comp);
+
+ for (HitPlusResidual h : hpr)
+ {
+            if (hits.size() <= min_points) break;
+ if (h.residual() > max_residual) hits.remove(h.hit());
+ }
+ }
+
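+    //Note: despite the name, this is the RMS of the gaps between consecutive
+    //sorted z values, not the spread about the mean; z_min and z_max are set
+    //here as a side effect.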
+ private double calculateZRMS()
+ {
+ if (fail) return Double.POSITIVE_INFINITY;
+
+ double[] z_copy = new double[z.length];
+ for (int i = 0; i<z.length;i++) z_copy[i]=z[i];
+ Arrays.sort(z_copy);
+ z_min = z_copy[0];
+ z_max = z_copy[z_copy.length-1];
+ double diff_z2_total = 0;
+
+ for (int i = 0; i<z.length-1; i++)
+ {
+ diff_z2_total += Math.pow((z_copy[i+1]-z_copy[i]),2);
+ }
+
+
+ return Math.sqrt(diff_z2_total/((double)(z.length-1)));
+ }
+
+// private double evaluateRidiculousIntegral(double x, Cluster cl) //experimental
+// {
+// double phi_0 = (phi_max + cl.phi_max())/2.0;
+//
+//
+// double a=params[3];
+// double b=params[2];
+// double c=params[1];
+// double d=params[0];
+//
+// double A=cl.params()[3];
+// double B=cl.params()[2];
+// double C=cl.params()[1];
+// double D=cl.params()[0];
+//
+// //integral calculated with maxima, with some luck transcribed correctly
+// return x*D*D+x*x*C*D+(2*x*x*x*B*D)/3.+(x*x*x*x*A*D)/2.+
+// (a*x*x*x*x*x*D)/2.-4*a*phi_0*x*x*x*D-
+// (2*b*x*x*x*D)/3.+12*a*phi_0*phi_0*x*x*D+ 4*b*phi_0*x*x*D+c*x*x*D-
+// 16*a*phi_0*phi_0*phi_0*x*D-8*b*phi_0*phi_0*x*D-
+// 4*c*phi_0*x*D-2*d*x*D+(x*x*x*C*C)/3.+
+// (x*x*x*x*B*C)/2.+(2*x*x*x*x*x*A*C)/5.+(2*a*x*x*x*x*x*C)/5.-
+// 3*a*phi_0*x*x*x*x*C-(b*x*x*x*x*C)/2.+
+// 8*a*phi_0*phi_0*x*x*x*C+(8*b*phi_0*x*x*x*C)/3.+
+// (2*c*x*x*x*C)/3.-8*a*phi_0*phi_0*phi_0*x*x*C-
+// 4*b*phi_0*phi_0*x*x*C-2*c*phi_0*x*phi_0*C-
+// d*x*x*C+(x*x*x*x*x*B*B)/5.+(x*x*x*x*x*x*A*B)/3.+
+// (a*x*x*x*x*x*x*B)/3.-(12*a*phi_0*x*x*x*x*x*B)/5.-
+// (2*b*x*x*x*x*x*B)/5.+6*a*phi_0*phi_0*x*x*x*x*B+
+// 2*b*phi_0*x*x*x*x*B+(c*x*x*x*x*B)/2.-
+// (16*a*phi_0*phi_0*phi_0*x*x*x*B)/3.-(8*b*phi_0*phi_0*x*x*x*B)/3.
+// -(4*c*phi_0*x*x*x*B)/3.-(2*d*x*x*x*B)/3.+(x*x*x*x*x*x*x*A*A)/7.+
+// (2*a*x*x*x*x*x*x*x*A)/7.-2*a*phi_0*x*x*x*x*x*x*A-(b*x*x*x*x*x*x*A)/3.
+// +(24*a*phi_0*phi_0*x*x*x*x*x*A)/5.+(8*b*phi_0*x*x*x*x*x*A)/5.
+// +(2*c*x*x*x*x*x*A)/5.-4*a*phi_0*phi_0*phi_0*x*x*x*x*A-
+// 2*b*phi_0*phi_0*x*x*x*x*A-c*phi_0*x*x*x*x*A-
+// (d*x*x*x*x*A)/2.+(a*a*x*x*x*x*x*x*x)/7.-
+// 2*a*a*phi_0*x*x*x*x*x*x-(a*b*x*x*x*x*x*x)/3.+
+// 12*a*a*phi_0*phi_0*x*x*x*x*x+4*a*b*phi_0*x*x*x*x*x+(2*a*c*x*x*x*x*x)/5.
+// +(b*b*x*x*x*x*x)/5.-40*a*a*phi_0*phi_0*phi_0*x*x*x*x-
+// 20*a*b*phi_0*phi_0*x*x*x*x-4*a*c*phi_0*x*x*x*x-2*b*b*phi_0*x*x*x*x-
+// (a*d*x*x*x*x)/2.-(b*c*x*x*x*x)/2.+80*a*a*phi_0*phi_0*phi_0*phi_0*x*x*x+
+// (160*a*b*phi_0*phi_0*phi_0*x*x*x)/3.+16*a*c*phi_0*phi_0*x*x*x+
+// 8*b*b*phi_0*phi_0*x*x*x+4*a*d*phi_0*x*x*x+4*b*c*phi_0*x*x*x+(2*b*d*x*x*x)/3.
+// +(c*c*x*x*x)/3.-96*a*a*phi_0*phi_0*phi_0*phi_0*phi_0*x*x-
+// 80*a*b*phi_0*phi_0*phi_0*phi_0*x*x-32*a*c*phi_0*phi_0*phi_0*x*x-
+// 16*b*b*phi_0*phi_0*phi_0*x*x-12*a*d*phi_0*phi_0*x*x-
+// 12*b*c*phi_0*phi_0*x*x-4*b*d*phi_0*x*x-2*c*c*phi_0*x*x-
+// c*d*x*x+64*a*a*phi_0*phi_0*phi_0*phi_0*phi_0*phi_0*x+
+// 64*a*b*phi_0*phi_0*phi_0*phi_0*phi_0*x+
+// 32*a*c*phi_0*phi_0*phi_0*phi_0*x+16*b*b*phi_0*phi_0*phi_0*phi_0*x
+// +16*a*d*phi_0*phi_0*phi_0*x+16*b*c*phi_0*phi_0*phi_0*x+
+// 8*b*d*phi_0*phi_0*x+4*c*c*phi_0*phi_0*x+4*c*d*phi_0*x+
+// d*d*x-(128*a*a*phi_0*phi_0*phi_0*phi_0*phi_0*phi_0*phi_0)/7.;
+// }
+//
+// private double approximateRidiculousIntegral(Cluster cl, int n) //approximate integral using MC. experimental
+// {
+// double min = cl.phi_min();
+// double max = cl.phi_max();
+//
+// double width = max-min;
+// double phi_0 = (phi_avg+cl.phi_avg())/2.0;
+//
+// double a=params[3];
+// double b=params[2];
+// double c=params[1];
+// double d=params[0];
+//
+// double[] translated_params = new double[4];
+// translated_params[0] = -(a);
+// translated_params[1] = (b+6*a*phi_0);
+// translated_params[2] = -(12*a*phi_0*phi_0+4*b*phi_0+c);
+// translated_params[3] = (8*a*(phi_0*phi_0*phi_0)+4*b*(phi_0*phi_0)+2*c*phi_0+d);
+//
+// double total = 0;
+// for (int i = 0; i<n; i++)
+// {
+// double x = Math.random()*width+min;
+// total+=Math.abs(f(x,translated_params)-f(x,cl.params()));
+// }
+//
+// return width*total/((double)n);
+// }
+//
+
+
+ private double calculateResidual(TrackerHit h)
+ {
+ double real_z = h.getPosition()[2];
+ double p = scale_amount*Math.atan2(h.getPosition()[1],h.getPosition()[0]);
[truncated at 1000 lines; 120 more skipped]