Author: [log in to unmask]
Date: Mon Dec 15 23:04:00 2014
New Revision: 1754
Log:
Working snapshot of ECAL cosmic analysis.
Added:
java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdCosmicClusterDriver.java
java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java
Modified:
java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterDriver.java
java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterPlotsDriver.java
java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/MoyalFitFunction.java
java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java
Modified: java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterDriver.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterDriver.java (original)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterDriver.java Mon Dec 15 23:04:00 2014
@@ -60,7 +60,7 @@
}
/**
- * Set the name of the input RawTrackerHit collection name.
+ * Set the name of the input CalorimeterHit collection.
* By default this is initialized to "EcalCosmicCalHits".
* @param inputHitCollectionName The name of the input hit collection.
*/
@@ -147,7 +147,7 @@
* @param hitMap The hit map with all the collection's hits.
* @return The set of neighboring hit IDs.
*/
- private Set<Long> findNeighborHitIDs(CalorimeterHit hit, Map<Long, CalorimeterHit> hitMap) {
+ protected Set<Long> findNeighborHitIDs(CalorimeterHit hit, Map<Long, CalorimeterHit> hitMap) {
Set<Long> neigbhors = ecal.getNeighborMap().get(hit.getCellID());
Set<Long> neighborHitIDs = new HashSet<Long>();
for (long neighborID : neigbhors) {
@@ -163,7 +163,7 @@
* @param hitList The input hit list.
* @return The hit map.
*/
- private Map<Long, CalorimeterHit> createHitMap(List<CalorimeterHit> hitList) {
+ protected Map<Long, CalorimeterHit> createHitMap(List<CalorimeterHit> hitList) {
Map<Long, CalorimeterHit> hitMap = new HashMap<Long, CalorimeterHit>();
for (CalorimeterHit hit : hitList) {
hitMap.put(hit.getCellID(), hit);
@@ -179,7 +179,7 @@
* @param hitList The input hit list from the event.
* @return A list of calorimeter hits that can be turned into clusters.
*/
- private List<List<CalorimeterHit>> createClusteredHits(List<CalorimeterHit> hitList) {
+ protected List<List<CalorimeterHit>> createClusteredHits(List<CalorimeterHit> hitList) {
// Create empty list of clusters which are just lists of hits.
List<List<CalorimeterHit>> clusterList = new ArrayList<List<CalorimeterHit>>();
@@ -241,7 +241,7 @@
* @param clusteredHitLists The input hit lists.
* @return The hit lists that passed the cuts.
*/
- List<List<CalorimeterHit>> applyCuts(List<List<CalorimeterHit>> clusteredHitLists) {
+ protected List<List<CalorimeterHit>> applyCuts(List<List<CalorimeterHit>> clusteredHitLists) {
List<List<CalorimeterHit>> selectedHitLists = new ArrayList<List<CalorimeterHit>>();
for (List<CalorimeterHit> hitList : clusteredHitLists) {
Map<Integer, Set<CalorimeterHit>> rowMap = new HashMap<Integer, Set<CalorimeterHit>>();
Modified: java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterPlotsDriver.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterPlotsDriver.java (original)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/CosmicClusterPlotsDriver.java Mon Dec 15 23:04:00 2014
@@ -40,6 +40,7 @@
* Create ADC value plots from the cosmic clusters.
* @author Jeremy McCormick <[log in to unmask]>
*/
+// TODO: Add Driver argument to specify where fit files will go (which can use the outputFileName arg in steering file).
public class CosmicClusterPlotsDriver extends Driver {
EcalConditions conditions;
@@ -62,6 +63,7 @@
IHistogram1D fitMpvPullH1D;
IHistogram1D fitWidthPullH1D;
+ String fitDirectory = "fits";
String inputClusterCollectionName = "EcalCosmicClusters";
String rawHitsCollectionName = "EcalCosmicReadoutHits";
boolean doFits = true;
@@ -72,6 +74,10 @@
public void setDoFits(boolean doFits) {
this.doFits = doFits;
+ }
+
+ public void setFitDirectory(String fitDirectory) {
+ this.fitDirectory = fitDirectory;
}
public void setWritePulseShapeParameters(boolean writePulseShapeParameters) {
@@ -186,7 +192,7 @@
}
private void doFits() {
- File plotDir = new File("fits");
+ File plotDir = new File(fitDirectory);
plotDir.mkdir();
buffer = new StringBuffer();
@@ -236,7 +242,7 @@
plotter.region(0).plot(combinedSignalProfile, plotStyle);
plotter.region(0).plot(fitResult.fittedFunction(), functionStyle);
try {
- plotter.writeToFile("fits" + File.separator + "CombinedSignalProfileFit.png");
+ plotter.writeToFile(fitDirectory + File.separator + "CombinedSignalProfileFit.png");
} catch (IOException e) {
throw new RuntimeException(e);
}
@@ -266,8 +272,8 @@
fitMpvH1D.fill(fitResult.fittedFunction().parameter("mpv"));
fitWidthH1D.fill(fitResult.fittedFunction().parameter("width"));
- fitMpvPullH1D.fill((fitResult.fittedFunction().parameter("mpv") - this.combinedMpv) / fitResult.errors()[0]);
- fitWidthPullH1D.fill((fitResult.fittedFunction().parameter("width") - this.combinedWidth) / fitResult.errors()[1]);
+ fitMpvPullH1D.fill((fitResult.fittedFunction().parameter("mpv") - this.combinedMpv) / fitResult.errors()[2]);
+ fitWidthPullH1D.fill((fitResult.fittedFunction().parameter("width") - this.combinedWidth) / fitResult.errors()[3]);
IPlotter plotter = plotterFactory.create();
IPlotterStyle functionStyle = plotterFactory.createPlotterStyle();
@@ -283,7 +289,7 @@
plotter.region(0).plot(profile, plotStyle);
plotter.region(0).plot(fitResult.fittedFunction(), functionStyle);
try {
- plotter.writeToFile("fits" + File.separator + "EcalChannel" + String.format("%03d", channel.getChannelId()) + "Fit.png");
+ plotter.writeToFile(fitDirectory + File.separator + "EcalChannel" + String.format("%03d", channel.getChannelId()) + "Fit.png");
} catch (IOException e) {
throw new RuntimeException(e);
}
Added: java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdCosmicClusterDriver.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdCosmicClusterDriver.java (added)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdCosmicClusterDriver.java Mon Dec 15 23:04:00 2014
@@ -0,0 +1,201 @@
+package org.hps.analysis.ecal.cosmic;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
+
+import org.lcsim.detector.identifier.Identifier;
+import org.lcsim.event.CalorimeterHit;
+import org.lcsim.event.Cluster;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.RawTrackerHit;
+import org.lcsim.event.base.BaseCluster;
+import org.lcsim.lcio.LCIOConstants;
+
+public class DualThresholdCosmicClusterDriver extends CosmicClusterDriver {
+
+ String inputTightHitsCollection = "TightEcalCosmicReadoutHits";
+ String inputLooseHitsCollection = "LooseEcalCosmicCalHits";
+
+ List<CalorimeterHit> currentLooseHits;
+ List<RawTrackerHit> currentTightHits;
+ Set<Long> currentTightHitCellIDs;
+
+ boolean writeOnlyClusterEvents = true;
+
+ static boolean debug = false;
+
+ public void writeOnlyClusterEvents(boolean writeOnlyClusterEvents) {
+ this.writeOnlyClusterEvents = writeOnlyClusterEvents;
+ }
+
+ public void setInputTightHitsCollection(String inputTightHitsCollection) {
+ this.inputTightHitsCollection = inputTightHitsCollection;
+ }
+
+ public void setInputLooseHitsCollection(String inputLooseHitsCollection) {
+ this.inputLooseHitsCollection = inputLooseHitsCollection;
+ }
+
+ private Set<Long> createCellIDSet(List<RawTrackerHit> rawHits) {
+ Set<Long> set = new HashSet<Long>();
+ for (RawTrackerHit hit : rawHits) {
+ set.add(hit.getCellID());
+ }
+ return set;
+ }
+
+ public void process(EventHeader event) {
+
+ // Get loose hits collection of CalorimeterHits.
+ if (!event.hasCollection(CalorimeterHit.class, inputLooseHitsCollection)) {
+ throw new RuntimeException("The collection " + inputLooseHitsCollection + " does not exist.");
+ }
+ currentLooseHits = event.get(CalorimeterHit.class, inputLooseHitsCollection);
+ if (debug) {
+ System.out.println("DEBUG: collection " + inputLooseHitsCollection + " has " + currentLooseHits.size() + " hits");
+ }
+
+ // Get tight hits collection of RawTrackerHit (raw data).
+ if (!event.hasCollection(RawTrackerHit.class, inputTightHitsCollection)) {
+ throw new RuntimeException("The collection " + inputTightHitsCollection + " does not exist.");
+ }
+ currentTightHits = event.get(RawTrackerHit.class, inputTightHitsCollection);
+ if (debug) {
+ System.out.println("DEBUG: collection " + inputTightHitsCollection + " has " + currentTightHits.size() + " hits");
+ }
+
+ // Create set of tight hit cell IDs for use by clustering algorithm.
+ currentTightHitCellIDs = createCellIDSet(currentTightHits);
+
+ // Create the clusters.
+ List<Cluster> clusters = createClusters(currentLooseHits);
+ if (debug) {
+ System.out.println("DEBUG: created " + clusters.size() + " clusters from " + inputLooseHitsCollection);
+ }
+
+ // Apply cluster cuts.
+ List<Cluster> selectedClusters = this.applyClusterCuts(clusters);
+ if (debug) {
+ System.out.println("DEBUG: " + selectedClusters.size() + " clusters passed cuts");
+ }
+
+ if (selectedClusters.isEmpty() && this.writeOnlyClusterEvents) {
+ if (debug) {
+ System.out.println("skipping event without any clusters");
+ }
+ throw new NextEventException();
+ }
+
+ // Write clusters to event (may write empty collection).
+ int flags = 1 << LCIOConstants.CLBIT_HITS;
+ event.put(outputClusterCollectionName, selectedClusters, Cluster.class, flags);
+ }
+
+ List<Cluster> createClusters(List<CalorimeterHit> hitList) {
+ List<Cluster> clusters = new ArrayList<Cluster>();
+
+ // Create map of IDs to hits for convenience.
+ Map<Long, CalorimeterHit> hitMap = createHitMap(hitList);
+
+ // Create list of hits that are clusterable, which is initially all of them.
+ Set<CalorimeterHit> clusterable = new HashSet<CalorimeterHit>();
+ clusterable.addAll(hitList);
+
+ // Loop over all hits in the map.
+ for (CalorimeterHit currentHit : hitList) {
+
+ // Is hit clusterable and in tight significance list?
+ if (!clusterable.contains(currentHit) || !currentTightHitCellIDs.contains(currentHit.getCellID())) {
+ // Continue to next hit.
+ continue;
+ }
+
+ // Create list for clustering this hit.
+ List<CalorimeterHit> clusterHits = new ArrayList<CalorimeterHit>();
+
+ // Set of hits whose neighbors have not been checked yet.
+ LinkedList<CalorimeterHit> uncheckedHits = new LinkedList<CalorimeterHit>();
+ uncheckedHits.add(currentHit);
+
+ // While there are still unchecked hits in the cluster.
+ while (uncheckedHits.size() > 0) {
+
+ // Get the first hit and add it to the cluster.
+ CalorimeterHit clusterHit = uncheckedHits.removeFirst();
+
+ // Add hit to the cluster.
+ clusterHits.add(clusterHit);
+
+ // Remove the hit from the clusterable list.
+ clusterable.remove(clusterHit);
+
+ // Loop over the neighbors and add IDs with hits to the unchecked list.
+ for (Long neighborHitID : this.findNeighborHitIDs(clusterHit, hitMap)) {
+ CalorimeterHit neighborHit = hitMap.get(neighborHitID);
+ if (clusterable.contains(neighborHit)) {
+ uncheckedHits.add(neighborHit);
+ }
+ }
+ }
+
+ // Are there enough hits in cluster?
+ if (clusterHits.size() >= this.minimumClusterSize) {
+ // Create cluster and add to the output list.
+ clusters.add(createCluster(clusterHits));
+ }
+ }
+
+ return clusters;
+ }
+
+ Cluster createCluster(List<CalorimeterHit> clusterHits) {
+ BaseCluster cluster = new BaseCluster();
+ double totalEnergy = 0;
+ for (CalorimeterHit clusterHit : clusterHits) {
+ cluster.addHit(clusterHit);
+ totalEnergy += clusterHit.getCorrectedEnergy();
+ }
+ cluster.setEnergy(totalEnergy);
+ return cluster;
+ }
+
+ /**
+ * This method takes a list of clusters and applies selection cuts,
+ * returning a new list with the clusters that did not pass the cuts removed.
+ * @param inputClusters The input list of clusters.
+ * @return The clusters that passed the cuts.
+ */
+ List<Cluster> applyClusterCuts(List<Cluster> inputClusters) {
+ List<Cluster> selectedClusters = new ArrayList<Cluster>();
+ for (Cluster cluster : inputClusters) {
+ Map<Integer, Set<CalorimeterHit>> rowMap = new HashMap<Integer, Set<CalorimeterHit>>();
+ for (CalorimeterHit hit : cluster.getCalorimeterHits()) {
+ int row = helper.getValue(new Identifier(hit.getCellID()), "iy");
+ if (rowMap.get(row) == null) {
+ rowMap.put(row, new HashSet<CalorimeterHit>());
+ }
+ rowMap.get(row).add(hit);
+ }
+ if (rowMap.size() >= minimumRows) {
+ boolean okay = true;
+ rowMapLoop: for (Entry<Integer, Set<CalorimeterHit>> entries : rowMap.entrySet()) {
+ if (entries.getValue().size() > maximumHitsPerRow) {
+ okay = false;
+ break rowMapLoop;
+ }
+ }
+ if (okay) {
+ selectedClusters.add(cluster);
+ }
+ }
+ }
+ return selectedClusters;
+ }
+
+}
Added: java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java (added)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java Mon Dec 15 23:04:00 2014
@@ -0,0 +1,280 @@
+package org.hps.analysis.ecal.cosmic;
+
+import hep.aida.IAnalysisFactory;
+import hep.aida.IDataPointSet;
+import hep.aida.IFitFactory;
+import hep.aida.IFitResult;
+import hep.aida.IFitter;
+import hep.aida.IFunction;
+import hep.aida.IFunctionFactory;
+import hep.aida.IHistogram1D;
+import hep.aida.ref.fitter.FitResult;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.conditions.database.TableConstants;
+import org.hps.conditions.ecal.EcalChannel;
+import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
+import org.hps.conditions.ecal.EcalChannelConstants;
+import org.hps.conditions.ecal.EcalConditions;
+import org.lcsim.conditions.ConditionsManager;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.RawTrackerHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.geometry.subdetector.HPSEcal3;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+
+/**
+ * <p>
+ * This Driver will perform a function fit on ECAL window mode data
+ * to determine the likelihood of a signal being present, e.g. from a cosmic
+ * ray MIP signal. By default, the mean and sigma are fixed in the fit, and the
+ * pedestal and normalization are allowed to float. The pedestal can also be
+ * configured as fixed using the {@link #setFixPedestal(boolean)} method.
+ * <p>
+ * Those hits with a signal significance greater than a settable
+ * threshold (by default set to 4 sigma) will be written into an output collection
+ * of selected hits that can be used by other Drivers.
+ *
+ * @author Jeremy McCormick <[log in to unmask]>
+ * @author Tim Nelson <[log in to unmask]>
+ */
+public class DualThresholdSignalFitDriver extends Driver {
+
+ // ECAL conditions data.
+ EcalConditions conditions = null;
+ EcalChannelCollection channels = null;
+
+ // AIDA setup.
+ AIDA aida = AIDA.defaultInstance();
+ IAnalysisFactory analysisFactory = aida.analysisFactory();
+ IFunctionFactory functionFactory = analysisFactory.createFunctionFactory(null);
+ IFitFactory fitFactory = analysisFactory.createFitFactory();
+ IFitter fitter = fitFactory.createFitter();
+
+ // DPS used to fit a hit's ADC samples.
+ IDataPointSet adcDataPointSet;
+
+ // Per channel histograms filled when doing the fit.
+ Map<EcalChannel, IHistogram1D> signalNormHistograms = new HashMap<EcalChannel, IHistogram1D>();
+ Map<EcalChannel, IHistogram1D> pedestalNormHistograms = new HashMap<EcalChannel, IHistogram1D>();
+ Map<EcalChannel, IHistogram1D> signalSignificanceHistograms = new HashMap<EcalChannel, IHistogram1D>();
+
+ // The function that will be used for the signal fit.
+ IFunction fitFunction;
+
+ // The output hits collection with the selected hits.
+ String outputHitsCollectionName = "EcalCosmicReadoutHits";
+
+ // The input hits collection with all the raw data hits.
+ String inputHitsCollectionName = "EcalReadoutHits";
+
+ HPSEcal3 ecal = null;
+ static String ecalName = "Ecal";
+
+ // The minimum number of required hits for event processing to continue.
+ int minimumHits = 3;
+
+ // This determines whether the pedestal is fixed in the fit parameters.
+ boolean fixPedestal = false;
+
+ boolean printFits = false;
+
+ // This is the required significance for signal hits (4 sigma default).
+ //static double signalSignificanceThreshold = 4.0;
+
+ double tightSignificanceThreshold = 3.0;
+ double looseSignificanceThreshold = 2.0;
+
+ String tightOutputHitsCollectionName = "Tight" + outputHitsCollectionName;
+ String looseOutputHitsCollectionName = "Loose" + outputHitsCollectionName;
+
+ // Global fit parameters for Moyal distribution.
+ static double signalMean = 45.698;
+ static double signalSigma = 2.2216;
+
+ // The initial value of the function normalization, which is not fixed in the fit.
+ static double norm = 60.0;
+
+ public void setTightSignificanceThreshold(double tightSignificanceThreshold) {
+ this.tightSignificanceThreshold = tightSignificanceThreshold;
+ }
+
+ public void setLooseSignificanceThreshold(double looseSignificanceThreshold) {
+ this.looseSignificanceThreshold = looseSignificanceThreshold;
+ }
+
+ public void printFits(boolean printFits) {
+ this.printFits = printFits;
+ }
+
+ /**
+ * Set the output hits collection name for the selected hits.
+ * @param outputHitsCollectionName The output hits collection name.
+ */
+ public void setOutputHitsCollectionName(String outputHitsCollectionName) {
+ this.outputHitsCollectionName = outputHitsCollectionName;
+ }
+
+ /**
+ * Set the input RawTrackerHit collection name used for the hit selection.
+ * @param inputHitsCollectionName The input hits collection name.
+ */
+ public void setInputHitsCollectionName(String inputHitsCollectionName) {
+ this.inputHitsCollectionName = inputHitsCollectionName;
+ }
+
+ /**
+ * Set the minimum number of required hits to continue processing this event.
+ * By default this is 3 hits.
+ * @param minimumHits The minimum number of hits.
+ */
+ public void setMinimumHits(int minimumHits) {
+ this.minimumHits = minimumHits;
+ }
+
+ /**
+ * Set whether the pedestal is fixed in the signal fit. By default this is false.
+ * @param fixPedestal True to fix the pedestal in the signal fit.
+ */
+ public void setFixPedestal(boolean fixPedestal) {
+ this.fixPedestal = fixPedestal;
+ }
+
+ /**
+ * Initialize conditions dependent class variables.
+ * @param detector The current Detector object.
+ */
+ public void detectorChanged(Detector detector) {
+ ecal = (HPSEcal3)detector.getSubdetector(ecalName);
+ conditions = ConditionsManager.defaultInstance().getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
+ channels = conditions.getChannelCollection();
+ for (EcalChannel channel : conditions.getChannelCollection()) {
+ signalNormHistograms.put(channel, aida.histogram1D(inputHitsCollectionName + "/Signal Norm : Channel " + String.format("%03d", channel.getChannelId()), 500, 0, 500.));
+ pedestalNormHistograms.put(channel, aida.histogram1D(inputHitsCollectionName + "/Pedestal Norm : Channel " + String.format("%03d", channel.getChannelId()), 500, 0, 500.));
+ signalSignificanceHistograms.put(channel, aida.histogram1D(inputHitsCollectionName + "/Signal Significance : Channel " + String.format("%03d", channel.getChannelId()), 200, -5., 35.));
+ }
+ }
+
+ /**
+ * Perform start of job initialize.
+ * The DataPointSet for the ADC values is initialized and global fit parameters are set here.
+ */
+ public void startOfData() {
+ adcDataPointSet = aida.analysisFactory().createDataPointSetFactory(null).create("ADC DataPointSet", 2);
+
+ fitFunction = new MoyalFitFunction();
+ fitFunction.setParameter("mpv", signalMean);
+ fitFunction.setParameter("width", signalSigma);
+ fitFunction.setParameter("norm", norm);
+
+ fitter.fitParameterSettings("mpv").setFixed(true);
+ fitter.fitParameterSettings("width").setFixed(true);
+ if (fixPedestal) {
+ fitter.fitParameterSettings("pedestal").setFixed(true);
+ }
+ }
+
+ /**
+ * Process the event, performing a signal fit for every raw data hit in the input collection.
+ * The hits that pass the sigma selection cut are added to a new hits collection, which can be
+ * converted to a CalorimeterHit collection and then clustered.
+ * @throws NextEventException if there are not enough hits that pass the selection cut.
+ */
+ public void process(EventHeader event) {
+ if (event.hasCollection(RawTrackerHit.class, inputHitsCollectionName)) {
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, inputHitsCollectionName);
+ //List<RawTrackerHit> selectedHitsList = new ArrayList<RawTrackerHit>();
+ List<RawTrackerHit> looseHitsList = new ArrayList<RawTrackerHit>();
+ List<RawTrackerHit> tightHitsList = new ArrayList<RawTrackerHit>();
+
+ for (RawTrackerHit hit : hits) {
+ EcalChannel channel = channels.findGeometric(hit.getCellID());
+ if (channel != null) {
+
+ EcalChannelConstants channelConstants = conditions.getChannelConstants(channel);
+ double noise = channelConstants.getCalibration().getNoise();
+
+ // Clear the DPS from previous fit.
+ adcDataPointSet.clear();
+
+ // Loop over all ADC values of the hit.
+ for (int adcSample = 0; adcSample < hit.getADCValues().length; adcSample++) {
+ // Insert a DP into the DPS for each sample.
+ adcDataPointSet.addPoint();
+
+ // Coordinate 1 is the ADC sample number.
+ adcDataPointSet.point(adcSample).coordinate(0).setValue(adcSample);
+
+ // Coordinate 2 is the ADC sample value and its errors, which is set to the
+ // noise from the EcalCalibration condition object for plus and minus.
+ adcDataPointSet.point(adcSample).coordinate(1).setValue(hit.getADCValues()[adcSample]);
+ adcDataPointSet.point(adcSample).coordinate(1).setErrorMinus(noise);
+ adcDataPointSet.point(adcSample).coordinate(1).setErrorPlus(noise);
+ }
+
+ // Fit the ADC signal.
+ IFitResult fitResult = fitAdcSamples(channel, adcDataPointSet);
+
+ if (printFits) {
+ ((FitResult)fitResult).printResult();
+ }
+
+ // Calculate the signal significance which is norm over error.
+ double signalSignificance = fitResult.fittedParameter("norm") / fitResult.errors()[1];
+
+ // Fill signal significance histogram.
+ this.signalSignificanceHistograms.get(channel).fill(signalSignificance);
+
+ // Is the significance over the threshold?
+ if (signalSignificance >= this.looseSignificanceThreshold) {
+ //System.out.println(fitResult.fittedParameter("norm") + " " + fitResult.errors()[1] + " " + signalSignificance);
+ // Add the hit to the output list.
+ looseHitsList.add(hit);
+ if (signalSignificance >= this.tightSignificanceThreshold) {
+ tightHitsList.add(hit);
+ }
+ }
+ } else {
+ throw new RuntimeException("EcalChannel not found for cell ID 0x" + String.format("%08d", Long.toHexString(hit.getCellID())));
+ }
+ }
+
+ // Is there at least one tight hit and at least minimumHits loose hits?
+ if (!tightHitsList.isEmpty() && looseHitsList.size() >= this.minimumHits) {
+
+ // Write tight hits list.
+ event.put(tightOutputHitsCollectionName, tightHitsList, RawTrackerHit.class, event.getMetaData(hits).getFlags(), ecal.getReadout().getName());
+ event.getMetaData(tightHitsList).setSubset(true);
+
+ // Write loose hits list.
+ event.put(looseOutputHitsCollectionName, looseHitsList, RawTrackerHit.class, event.getMetaData(hits).getFlags(), ecal.getReadout().getName());
+ event.getMetaData(looseHitsList).setSubset(true);
+
+ } else {
+ throw new NextEventException();
+ }
+ }
+ }
+
+ /**
+ * Fit all of the ADC samples in a hit using a DataPointSet and then return the fit result.
+ * @param channel The ECAL channel conditions information.
+ * @param adcDataPointSet The DataPointSet to use for the fit containing all 100 ADC samples.
+ * @return The fit result from fitting the ADC samples.
+ */
+ IFitResult fitAdcSamples(EcalChannel channel, IDataPointSet adcDataPointSet) {
+ EcalChannelConstants channelConstants = conditions.getChannelConstants(channel);
+ fitFunction.setParameter("pedestal", channelConstants.getCalibration().getPedestal());
+ IFitResult fitResult = fitter.fit(adcDataPointSet, fitFunction);
+ this.signalNormHistograms.get(channel).fill(fitResult.fittedParameter("norm"));
+ this.pedestalNormHistograms.get(channel).fill(fitResult.fittedParameter("pedestal"));
+ return fitResult;
+ }
+
+
+}
Modified: java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/MoyalFitFunction.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/MoyalFitFunction.java (original)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/MoyalFitFunction.java Mon Dec 15 23:04:00 2014
@@ -19,7 +19,7 @@
public MoyalFitFunction(String title) {
super();
this.variableNames = new String[] { "x0" };
- this.parameterNames = new String[] { "mpv", "width", "norm", "pedestal" };
+ this.parameterNames = new String[] { "pedestal", "norm", "mpv", "width" };
init(title);
}
Modified: java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java
=============================================================================
--- java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java (original)
+++ java/trunk/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java Mon Dec 15 23:04:00 2014
@@ -72,7 +72,7 @@
// The input hits collection with all the raw data hits.
String inputHitsCollectionName = "EcalReadoutHits";
-
+
HPSEcal3 ecal = null;
static String ecalName = "Ecal";
@@ -82,16 +82,22 @@
// This determines whether the pedestal is fixed in the fit parameters.
boolean fixPedestal = false;
+ boolean printFits = false;
+
// This is the required significance for signal hits (4 sigma default).
static double signalSignificanceThreshold = 4.0;
- // Global fit parameters.
- static double signalMean = 45.857 - 0.5; // Subtracted because of binning effect in profile histogram. Fix this!
- static double signalSigma = 1.9256;
+ // Global fit parameters for Moyal distribution.
+ static double signalMean = 45.698;
+ static double signalSigma = 2.2216;
// The initial value of the function normalization, which is not fixed in the fit.
static double norm = 60.0;
-
+
+ public void printFits(boolean printFits) {
+ this.printFits = printFits;
+ }
+
/**
* Set the output hits collection name for the selected hits.
* @param outputHitsCollectionName The output hits collection name.
@@ -196,18 +202,20 @@
// Fit the ADC signal.
IFitResult fitResult = fitAdcSamples(channel, adcDataPointSet);
-
- ((FitResult)fitResult).printResult();
+
+ if (printFits) {
+ ((FitResult)fitResult).printResult();
+ }
// Calculate the signal significance which is norm over error.
- double signalSignificance = fitResult.fittedParameter("norm") / fitResult.errors()[2];
+ double signalSignificance = fitResult.fittedParameter("norm") / fitResult.errors()[1];
// Fill signal significance histogram.
this.signalSignificanceHistograms.get(channel).fill(signalSignificance);
// Is the significance over the threshold?
if (signalSignificance >= signalSignificanceThreshold) {
- System.out.println(fitResult.fittedParameter("norm") + " " + fitResult.errors()[2] + " " + signalSignificance);
+ //System.out.println(fitResult.fittedParameter("norm") + " " + fitResult.errors()[1] + " " + signalSignificance);
// Add the hit to the output list.
selectedHitsList.add(hit);
}
|