Author: [log in to unmask]
Date: Wed Dec 10 12:02:20 2014
New Revision: 1668
Log:
Checking in current snapshot of ECAL cosmic analysis code. This will all be subsequently moved from my user area.
Added:
java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitFitDriver.java
java/trunk/users/src/main/java/org/hps/users/jeremym/EcalWindowModeFitFunction.java
Modified:
java/trunk/users/src/main/java/org/hps/users/jeremym/EcalADCProfilePlotsDriver.java
java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterDriver.java
java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterPlotsDriver.java
java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitSelectionDriver.java
java/trunk/users/src/main/java/org/hps/users/jeremym/LandauFunction.java
Modified: java/trunk/users/src/main/java/org/hps/users/jeremym/EcalADCProfilePlotsDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/EcalADCProfilePlotsDriver.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/EcalADCProfilePlotsDriver.java Wed Dec 10 12:02:20 2014
@@ -1,11 +1,18 @@
package org.hps.users.jeremym;
import hep.aida.IAnalysisFactory;
+import hep.aida.IFitFactory;
+import hep.aida.IFitResult;
+import hep.aida.IFitter;
+import hep.aida.IFunction;
+import hep.aida.IFunctionFactory;
import hep.aida.IProfile1D;
+import hep.aida.ref.fitter.FitResult;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import org.hps.conditions.database.TableConstants;
import org.hps.conditions.ecal.EcalChannel;
@@ -27,13 +34,13 @@
EcalChannelCollection channels = null;
Map<EcalChannel, IProfile1D> adcProfiles = new HashMap<EcalChannel, IProfile1D>();
AIDA aida = AIDA.defaultInstance();
- IAnalysisFactory analysisFactory = aida.analysisFactory();
+ IAnalysisFactory analysisFactory = aida.analysisFactory();
String inputHitsCollectionName = "EcalReadoutHits";
-
+
public void setInputHitsCollectionName(String inputHitsCollectionName) {
this.inputHitsCollectionName = inputHitsCollectionName;
}
-
+
public void detectorChanged(Detector detector) {
conditions = ConditionsManager.defaultInstance().getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
channels = conditions.getChannelCollection();
@@ -58,5 +65,5 @@
}
}
}
- }
+ }
}
Modified: java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterDriver.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterDriver.java Wed Dec 10 12:02:20 2014
@@ -89,13 +89,13 @@
calCluster.setEnergy(totalEnergy);
clusterCollection.add(calCluster);
}
- if (clusterCollection.size() > 0) {
- int flags = 1 << LCIOConstants.CLBIT_HITS;
- event.put(outputClusterCollectionName, clusterCollection, Cluster.class, flags);
- //System.out.println("added " + clusterCollection.size() + " clusters to " + outputClusterCollectionName);
- } else {
- throw new NextEventException();
- }
+ //if (clusterCollection.size() > 0) {
+ int flags = 1 << LCIOConstants.CLBIT_HITS;
+ event.put(outputClusterCollectionName, clusterCollection, Cluster.class, flags);
+ //System.out.println("added " + clusterCollection.size() + " clusters to " + outputClusterCollectionName);
+ //} else {
+ // throw new NextEventException();
+ //}
}
}
Modified: java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterPlotsDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterPlotsDriver.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicClusterPlotsDriver.java Wed Dec 10 12:02:20 2014
@@ -1,11 +1,26 @@
package org.hps.users.jeremym;
import hep.aida.IAnalysisFactory;
+import hep.aida.IFitFactory;
+import hep.aida.IFitResult;
+import hep.aida.IFitter;
+import hep.aida.IFunction;
+import hep.aida.IFunctionFactory;
+import hep.aida.IPlotter;
+import hep.aida.IPlotterFactory;
+import hep.aida.IPlotterStyle;
import hep.aida.IProfile1D;
-
+import hep.aida.ref.fitter.FitResult;
+import hep.aida.ref.function.AbstractIFunction;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.PrintWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import org.hps.conditions.database.TableConstants;
import org.hps.conditions.ecal.EcalChannel;
@@ -28,25 +43,57 @@
EcalConditions conditions = null;
EcalChannelCollection channels = null;
+ IProfile1D combinedSignalProfile;
Map<EcalChannel, IProfile1D> adcProfiles = new HashMap<EcalChannel, IProfile1D>();
AIDA aida = AIDA.defaultInstance();
IAnalysisFactory analysisFactory = aida.analysisFactory();
+ IFunctionFactory functionFactory = aida.analysisFactory().createFunctionFactory(null);
+ IFitFactory fitFactory = aida.analysisFactory().createFitFactory();
+ IPlotterFactory plotterFactory = aida.analysisFactory().createPlotterFactory();
String inputClusterCollectionName = "EcalCosmicClusters";
String rawHitsCollectionName = "EcalCosmicReadoutHits";
-
+ boolean doFits = true;
+ boolean writePulseShapeParameters = true;
+ boolean printFitResults = false;
+ String pulseShapeFileName = "ecal_pulse_shape_parameters.txt";
+ StringBuffer buffer;
+
+ public void setDoFits(boolean doFits) {
+ this.doFits = doFits;
+ }
+
+ public void setWritePulseShapeParameters(boolean writePulseShapeParameters) {
+ this.writePulseShapeParameters = writePulseShapeParameters;
+ }
+
+ public void setPulseShapeFileName(String calibrationsOutputFileName) {
+ this.pulseShapeFileName = calibrationsOutputFileName;
+ }
+
public void setInputHitsCollectionName(String inputClusterCollectionName) {
this.inputClusterCollectionName = inputClusterCollectionName;
}
public void setRawHitsCollectionName(String rawHitsCollectionName) {
this.rawHitsCollectionName = rawHitsCollectionName;
+ }
+
+ public void setPrintFitResults(boolean printFitResults) {
+ this.printFitResults = printFitResults;
+ }
+
+ public void startOfData() {
+ combinedSignalProfile = aida.profile1D(inputClusterCollectionName + "/Combined Signal Profile", 100, 0., 100.);
}
public void detectorChanged(Detector detector) {
conditions = ConditionsManager.defaultInstance().getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
channels = conditions.getChannelCollection();
for (EcalChannel channel : conditions.getChannelCollection()) {
- adcProfiles.put(channel, aida.profile1D(inputClusterCollectionName + "/ADC Values : " + String.format("%03d", channel.getChannelId()), 100, 0, 100));
+ IProfile1D profile = aida.profile1D(inputClusterCollectionName + "/ADC Values : Channel " + String.format("%03d", channel.getChannelId()), 100, 0, 100);
+ profile.annotation().addItem("xAxisLabel", "ADC Sample");
+ profile.annotation().addItem("yAxisLabel", "Counts");
+ adcProfiles.put(channel, profile);
}
}
@@ -64,6 +111,9 @@
for (int adcIndex = 0; adcIndex < rawHit.getADCValues().length; adcIndex++) {
// Fill the Profile1D with ADC value.
profile.fill(adcIndex, rawHit.getADCValues()[adcIndex]);
+
+ // Fill combined Profile histogram.
+ combinedSignalProfile.fill(adcIndex, rawHit.getADCValues()[adcIndex]);
}
} else {
throw new RuntimeException("EcalChannel not found for cell ID 0x" + String.format("%08x", rawHit.getCellID()));
@@ -83,5 +133,127 @@
}
return rawHitMap;
}
-
-}
+
+ public void endOfData() {
+ if (doFits) {
+ doFits();
+ }
+
+ if (this.writePulseShapeParameters) {
+ PrintWriter out = null;
+ try {
+ out = new PrintWriter(this.pulseShapeFileName);
+ out.print(buffer.toString());
+ } catch (FileNotFoundException e) {
+ throw new RuntimeException(e);
+ } finally {
+ if (out != null) {
+ out.close();
+ }
+ }
+ } else {
+ System.out.println();
+ System.out.println("Printing pulse shape parameters ...");
+ System.out.println(buffer.toString());
+ System.out.println();
+ }
+ }
+
+ private void doFits() {
+ File plotDir = new File("fits");
+ plotDir.mkdir();
+
+ buffer = new StringBuffer();
+ buffer.append("ecal_channel_id t0 pulse_width");
+ buffer.append('\n');
+
+ AbstractIFunction fitFunction = new EcalWindowModeFitFunction();
+ functionFactory.catalog().add("ecal_fit_function", fitFunction);
+ for (Entry<EcalChannel, IProfile1D> entry : this.adcProfiles.entrySet()) {
+ doFit(entry.getKey(), entry.getValue());
+ }
+
+ fitCombinedSignalProfile(this.combinedSignalProfile);
+ }
+
+ public void fitCombinedSignalProfile(IProfile1D combinedSignalProfile) {
+ IFunction function = functionFactory.createFunctionByName("ecal_fit_function", "ecal_fit_function");
+ function.setParameter("mean", 46);
+ function.setParameter("sigma", 2);
+ function.setParameter("pedestal", 100);
+ function.setParameter("norm", 60.0);
+
+ IFitter fitter = fitFactory.createFitter();
+ IFitResult fitResult = fitter.fit(combinedSignalProfile, function);
+
+ if (printFitResults) {
+ System.out.println();
+ System.out.println("Printing fit result for channel Combined Signal Profile");
+ ((FitResult)fitResult).printResult();
+ System.out.println();
+ }
+
+ IPlotter plotter = plotterFactory.create();
+ IPlotterStyle functionStyle = plotterFactory.createPlotterStyle();
+ functionStyle.dataStyle().outlineStyle().setColor("red");
+ functionStyle.legendBoxStyle().setVisible(true);
+ functionStyle.statisticsBoxStyle().setVisible(true);
+ IPlotterStyle plotStyle = plotterFactory.createPlotterStyle();
+ plotStyle.dataStyle().fillStyle().setColor("blue");
+ plotStyle.legendBoxStyle().setVisible(true);
+ plotStyle.statisticsBoxStyle().setVisible(true);
+
+ plotter.createRegion();
+ plotter.region(0).plot(combinedSignalProfile, plotStyle);
+ plotter.region(0).plot(fitResult.fittedFunction(), functionStyle);
+ try {
+ plotter.writeToFile("fits" + File.separator + "CombinedSignalProfileFit.png");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ buffer.append("Combined Signal Profile " + fitResult.fittedFunction().parameter("mean") + " " + fitResult.fittedFunction().parameter("sigma"));
+ buffer.append('\n');
+ }
+
+ public void doFit(EcalChannel channel, IProfile1D profile) {
+
+ IFunction function = functionFactory.createFunctionByName("ecal_fit_function", "ecal_fit_function");
+ function.setParameter("mean", 48);
+ function.setParameter("sigma", 2);
+ function.setParameter("pedestal", conditions.getChannelConstants(channel).getCalibration().getPedestal());
+ function.setParameter("norm", 60.0);
+
+ IFitter fitter = fitFactory.createFitter();
+ IFitResult fitResult = fitter.fit(profile, function);
+
+ if (printFitResults) {
+ System.out.println();
+ System.out.println("Printing fit result for channel " + channel.getChannelId());
+ ((FitResult)fitResult).printResult();
+ System.out.println();
+ }
+
+ IPlotter plotter = plotterFactory.create();
+ IPlotterStyle functionStyle = plotterFactory.createPlotterStyle();
+ functionStyle.dataStyle().outlineStyle().setColor("red");
+ functionStyle.legendBoxStyle().setVisible(true);
+ functionStyle.statisticsBoxStyle().setVisible(true);
+ IPlotterStyle plotStyle = plotterFactory.createPlotterStyle();
+ plotStyle.dataStyle().fillStyle().setColor("blue");
+ plotStyle.legendBoxStyle().setVisible(true);
+ plotStyle.statisticsBoxStyle().setVisible(true);
+
+ plotter.createRegion();
+ plotter.region(0).plot(profile, plotStyle);
+ plotter.region(0).plot(fitResult.fittedFunction(), functionStyle);
+ try {
+ plotter.writeToFile("fits" + File.separator + "EcalChannel" + String.format("%03d", channel.getChannelId()) + "Fit.png");
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ buffer.append(channel.getChannelId() + " " + fitResult.fittedFunction().parameter("mean") + " " + fitResult.fittedFunction().parameter("sigma"));
+ buffer.append('\n');
+ }
+}
Added: java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitFitDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitFitDriver.java (added)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitFitDriver.java Wed Dec 10 12:02:20 2014
@@ -0,0 +1,238 @@
+package org.hps.users.jeremym;
+
+import hep.aida.IAnalysisFactory;
+import hep.aida.IDataPointSet;
+import hep.aida.IFitFactory;
+import hep.aida.IFitResult;
+import hep.aida.IFitter;
+import hep.aida.IFunction;
+import hep.aida.IFunctionFactory;
+import hep.aida.IHistogram1D;
+import hep.aida.ref.fitter.FitResult;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.hps.conditions.database.TableConstants;
+import org.hps.conditions.ecal.EcalChannel;
+import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
+import org.hps.conditions.ecal.EcalChannelConstants;
+import org.hps.conditions.ecal.EcalConditions;
+import org.lcsim.conditions.ConditionsManager;
+import org.lcsim.event.EventHeader;
+import org.lcsim.event.RawTrackerHit;
+import org.lcsim.geometry.Detector;
+import org.lcsim.geometry.subdetector.HPSEcal3;
+import org.lcsim.util.Driver;
+import org.lcsim.util.aida.AIDA;
+
+/**
+ * This Driver will perform a functional fit on ECAL window mode data
+ * to determine the likelihood of a signal being present, e.g. from a cosmic
+ * ray MIP signal. Those hits with a signal significance greater than a settable
+ * threshold (by default 4 sigma) will be written into an output collection
+ * of selected hits that can be used by other Drivers.
+ *
+ * @author Jeremy McCormick <[log in to unmask]>
+ * @author Tim Nelson <[log in to unmask]>
+ */
+public class EcalCosmicHitFitDriver extends Driver {
+
+ // ECAL conditions data.
+ EcalConditions conditions = null;
+ EcalChannelCollection channels = null;
+
+ // AIDA setup.
+ AIDA aida = AIDA.defaultInstance();
+ IAnalysisFactory analysisFactory = aida.analysisFactory();
+ IFunctionFactory functionFactory = analysisFactory.createFunctionFactory(null);
+ IFitFactory fitFactory = analysisFactory.createFitFactory();
+ IFitter fitter = fitFactory.createFitter();
+
+ // DPS used to fit a hit's ADC samples.
+ IDataPointSet adcDataPointSet;
+
+ // Per channel histograms filled when doing the fit.
+ Map<EcalChannel, IHistogram1D> signalNormHistograms = new HashMap<EcalChannel, IHistogram1D>();
+ Map<EcalChannel, IHistogram1D> pedestalNormHistograms = new HashMap<EcalChannel, IHistogram1D>();
+ Map<EcalChannel, IHistogram1D> signalSignificanceHistograms = new HashMap<EcalChannel, IHistogram1D>();
+
+ // The function that will be used for the signal fit.
+ IFunction fitFunction;
+
+ // The output hits collection with the selected hits.
+ String outputHitsCollectionName = "EcalCosmicReadoutHits";
+
+ // The input hits collection with all the raw data hits.
+ String inputHitsCollectionName = "EcalReadoutHits";
+
+ HPSEcal3 ecal = null;
+ static String ecalName = "Ecal";
+
+ // The minimum number of required hits for event processing to continue.
+ int minimumHits = 3;
+
+ // This determines whether the pedestal is fixed in the fit parameters.
+ boolean fixPedestal = false;
+
+ // This is the required significance for signal hits (4 sigma default).
+ static double signalSignificanceThreshold = 4.0;
+
+ // Global fit parameters.
+ static double signalMean = 45.857 - 0.5; // Subtracted because of binning effect in profile histogram. Fix this!
+ static double signalSigma = 1.9256;
+
+ // The initial value of the function normalization, which is not fixed in the fit.
+ static double norm = 60.0;
+
+ /**
+ * Set the output hits collection name for the selected hits.
+ * @param outputHitsCollectionName The output hits collection name.
+ */
+ public void setOutputHitsCollectionName(String outputHitsCollectionName) {
+ this.outputHitsCollectionName = outputHitsCollectionName;
+ }
+
+ /**
+ * Set the input RawTrackerHit collection name used for the hit selection.
+ * @param inputHitsCollectionName The input hits collection name.
+ */
+ public void setInputHitsCollectionName(String inputHitsCollectionName) {
+ this.inputHitsCollectionName = inputHitsCollectionName;
+ }
+
+ /**
+ * Set the minimum number of required hits to continue processing this event.
+ * By default this is 3 hits.
+ * @param minimumHits The minimum number of hits.
+ */
+ public void setMinimumHits(int minimumHits) {
+ this.minimumHits = minimumHits;
+ }
+
+ /**
+ * Set whether the pedestal is fixed in the signal fit. By default this is false.
+ * @param fixPedestal True to fix the pedestal in the signal fit.
+ */
+ public void setFixPedestal(boolean fixPedestal) {
+ this.fixPedestal = fixPedestal;
+ }
+
+ /**
+ * Perform start of job setup using the detector and conditions information.
+ */
+ public void detectorChanged(Detector detector) {
+ ecal = (HPSEcal3)detector.getSubdetector(ecalName);
+ conditions = ConditionsManager.defaultInstance().getCachedConditions(EcalConditions.class, TableConstants.ECAL_CONDITIONS).getCachedData();
+ channels = conditions.getChannelCollection();
+ for (EcalChannel channel : conditions.getChannelCollection()) {
+ signalNormHistograms.put(channel, aida.histogram1D(inputHitsCollectionName + "/Signal Norm : Channel " + String.format("%03d", channel.getChannelId()), 500, 0, 500.));
+ pedestalNormHistograms.put(channel, aida.histogram1D(inputHitsCollectionName + "/Pedestal Norm : Channel " + String.format("%03d", channel.getChannelId()), 500, 0, 500.));
+ signalSignificanceHistograms.put(channel, aida.histogram1D(inputHitsCollectionName + "/Signal Significance : Channel " + String.format("%03d", channel.getChannelId()), 200, -5., 35.));
+ }
+ }
+
+ /**
+ * Perform initialization to create the DPS for the ADC values and configure the global fit parameters.
+ */
+ public void startOfData() {
+ adcDataPointSet = aida.analysisFactory().createDataPointSetFactory(null).create("ADC DataPointSet", 2);
+
+ fitFunction = new EcalWindowModeFitFunction();
+ fitFunction.setParameter("mean", signalMean);
+ fitFunction.setParameter("sigma", signalSigma);
+ fitFunction.setParameter("norm", norm);
+
+ fitter.fitParameterSettings("mean").setFixed(true);
+ fitter.fitParameterSettings("sigma").setFixed(true);
+ if (fixPedestal) {
+ fitter.fitParameterSettings("pedestal").setFixed(true);
+ }
+ }
+
+ /**
+ * Process the event, performing a signal fit for every raw data hit in the input collection.
+ * Those hits that pass the selection cut are added to a new hits collection that can be converted
+ * to a CalorimeterHit collection and clustered.
+ * @throws NextEventException if there are not enough hits that pass the selection cut.
+ */
+ public void process(EventHeader event) {
+ if (event.hasCollection(RawTrackerHit.class, inputHitsCollectionName)) {
+ List<RawTrackerHit> hits = event.get(RawTrackerHit.class, inputHitsCollectionName);
+ List<RawTrackerHit> selectedHitsList = new ArrayList<RawTrackerHit>();
+ for (RawTrackerHit hit : hits) {
+ EcalChannel channel = channels.findGeometric(hit.getCellID());
+ if (channel != null) {
+
+ EcalChannelConstants channelConstants = conditions.getChannelConstants(channel);
+ double noise = channelConstants.getCalibration().getNoise();
+
+ // Clear the DPS from previous fit.
+ adcDataPointSet.clear();
+
+ // Loop over all ADC values of the hit.
+ for (int adcSample = 0; adcSample < hit.getADCValues().length; adcSample++) {
+ // Insert a DP into the DPS for each sample.
+ adcDataPointSet.addPoint();
+
+ // Coordinate 1 is the ADC sample number.
+ adcDataPointSet.point(adcSample).coordinate(0).setValue(adcSample);
+
+ // Coordinate 2 is the ADC sample value and its errors, which is set to the
+ // noise from the EcalCalibration condition object for plus and minus.
+ adcDataPointSet.point(adcSample).coordinate(1).setValue(hit.getADCValues()[adcSample]);
+ adcDataPointSet.point(adcSample).coordinate(1).setErrorMinus(noise);
+ adcDataPointSet.point(adcSample).coordinate(1).setErrorPlus(noise);
+ }
+
+ // Fit the ADC signal.
+ IFitResult fitResult = fitAdcSamples(channel, adcDataPointSet);
+
+ // Calculate the signal significance which is norm over error.
+ double signalSignificance = fitResult.fittedParameter("norm") / fitResult.errors()[2];
+
+ // Fill signal significance histogram.
+ this.signalSignificanceHistograms.get(channel).fill(signalSignificance);
+
+ // Is the significance over the threshold?
+ if (signalSignificance >= signalSignificanceThreshold) {
+ System.out.println(fitResult.fittedParameter("norm") + " " + fitResult.errors()[2] + " " + signalSignificance);
+ // Add the hit to the output list.
+ selectedHitsList.add(hit);
+ }
+ } else {
+ throw new RuntimeException("EcalChannel not found for cell ID 0x" + String.format("%08x", hit.getCellID()));
+ }
+ }
+
+ // Is the hit list greater than the minimum hits?
+ if (selectedHitsList.size() >= minimumHits) {
+ // Write the hits to a new collection of selected hits.
+ event.put(outputHitsCollectionName, selectedHitsList, RawTrackerHit.class, event.getMetaData(hits).getFlags(), ecal.getReadout().getName());
+ } else {
+ // Discontinue processing this event because there aren't enough hits for clustering.
+ throw new NextEventException();
+ }
+ }
+ }
+
+ /**
+ * Fit the ADC samples of a hit, returning the signal significance.
+ * @param channel The ECAL channel information.
+ * @param adcDataPointSet The DPS to use for the fit containing all 100 ADC samples.
+ * @return The significance which is the normalization divided by its error.
+ */
+ IFitResult fitAdcSamples(EcalChannel channel, IDataPointSet adcDataPointSet) {
+ EcalChannelConstants channelConstants = conditions.getChannelConstants(channel);
+ fitFunction.setParameter("pedestal", channelConstants.getCalibration().getPedestal());
+ IFitResult fitResult = fitter.fit(adcDataPointSet, fitFunction);
+ this.signalNormHistograms.get(channel).fill(fitResult.fittedParameter("norm"));
+ this.pedestalNormHistograms.get(channel).fill(fitResult.fittedParameter("pedestal"));
+ //double signalSignificance = fitResult.fittedParameter("norm") / fitResult.errors()[2];
+ //this.signalSignificanceHistograms.get(channel).fill(signalSignificance);
+ //return signalSignificance;
+ return fitResult;
+ }
+}
Modified: java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitSelectionDriver.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitSelectionDriver.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/EcalCosmicHitSelectionDriver.java Wed Dec 10 12:02:20 2014
@@ -42,7 +42,7 @@
String outputHitsCollectionName = "EcalCosmicReadoutHits";
String inputHitsCollectionName = "EcalReadoutHits";
HPSEcal3 ecal = null;
- static String ecalName = "Ecal";
+ static String ecalName = "Ecal";
/**
* Set the sigma threshold for an ADC value.
Added: java/trunk/users/src/main/java/org/hps/users/jeremym/EcalWindowModeFitFunction.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/EcalWindowModeFitFunction.java (added)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/EcalWindowModeFitFunction.java Wed Dec 10 12:02:20 2014
@@ -0,0 +1,41 @@
+package org.hps.users.jeremym;
+
+import hep.aida.ref.function.AbstractIFunction;
+
+public class EcalWindowModeFitFunction extends AbstractIFunction {
+
+ LandauPdf landauPdf = new LandauPdf();
+
+ public EcalWindowModeFitFunction() {
+ this("");
+ }
+
+ public EcalWindowModeFitFunction(String title) {
+ super();
+ this.variableNames = new String[] { "x0" };
+ this.parameterNames = new String[] { "mean", "sigma", "norm", "pedestal" };
+ init(title);
+ }
+
+ @Override
+ public double value(double[] v) {
+ return this.parameter("pedestal") + this.parameter("norm") * landauPdf.getValue(v[0]);
+ }
+
+ @Override
+ public void setParameter(String key, double value) throws IllegalArgumentException {
+ super.setParameter(key, value);
+ if (key.equals("mean")) {
+ landauPdf.setMean(value);
+ } else if (key.equals("sigma")) {
+ landauPdf.setSigma(value);
+ }
+ }
+
+ @Override
+ public void setParameters(double[] parameters) throws IllegalArgumentException {
+ super.setParameters(parameters);
+ landauPdf.setMean(parameters[0]);
+ landauPdf.setSigma(parameters[1]);
+ }
+}
Modified: java/trunk/users/src/main/java/org/hps/users/jeremym/LandauFunction.java
=============================================================================
--- java/trunk/users/src/main/java/org/hps/users/jeremym/LandauFunction.java (original)
+++ java/trunk/users/src/main/java/org/hps/users/jeremym/LandauFunction.java Wed Dec 10 12:02:20 2014
@@ -4,26 +4,19 @@
public class LandauFunction extends AbstractIFunction {
- LandauPdf landauPdf = new LandauPdf();
-
+ LandauPdf landauPdf = new LandauPdf();
+
public LandauFunction() {
this("");
}
public LandauFunction(String title) {
-
- super();
-
- variableNames = new String[1];
- variableNames[0] = "x0";
-
- parameterNames = new String[2];
- parameterNames[0] = "mean";
- parameterNames[1] = "sigma";
-
+ super();
+ this.variableNames = new String[] { "x0" };
+ this.parameterNames = new String[] { "mean", "sigma" };
init(title);
}
-
+
@Override
public double value(double[] v) {
return landauPdf.getValue(v[0]);
@@ -33,10 +26,8 @@
public void setParameter(String key, double value) throws IllegalArgumentException {
super.setParameter(key, value);
if (key.equals("mean")) {
- System.out.println("set mean = " + value);
landauPdf.setMean(value);
} else if (key.equals("sigma")) {
- System.out.println("set sigma = " + value);
landauPdf.setSigma(value);
}
}
|