25 removed + 84 modified, total 109 files
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality/SVTHitMCEfficiency.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality/SVTHitMCEfficiency.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -2,23 +2,24 @@
import hep.aida.IHistogram1D;
import hep.aida.IHistogram2D;
-import hep.aida.IHistogramFactory;
import hep.aida.IProfile1D;
+import hep.aida.IProfile2D;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hps.recon.tracking.FittedRawTrackerHit;
+import org.hps.recon.tracking.ShapeFitParameters;
import org.lcsim.detector.tracker.silicon.SiSensor;
import org.lcsim.event.EventHeader;
+import org.lcsim.event.GenericObject;
import org.lcsim.event.LCRelation;
import org.lcsim.event.RawTrackerHit;
import org.lcsim.event.RelationalTable;
import org.lcsim.event.SimTrackerHit;
+import org.lcsim.event.TrackerHit;
import org.lcsim.event.base.BaseRelationalTable;
import org.lcsim.geometry.Detector;
-import org.lcsim.recon.tracking.digitization.sisim.SiTrackerHit;
-import org.lcsim.recon.tracking.digitization.sisim.SiTrackerHitStrip1D;
/**
* DQM driver for the monte carlo SVT hit efficiency April 29 -- first pass,
@@ -70,10 +71,16 @@
createLayerPlot(plotDir + "clusterEfficiency", kk, 50, -40, 40.);
createLayerPlot(plotDir + "readoutEfficiency", kk, 50, -40, 40.);
createLayerPlot(plotDir + "rthToClusterEfficiency", kk, 50, -40, 40.);
- createLayerPlot2D(plotDir + "toogoodFits", kk, 200, -100, 100, 100, 0, 20000);
- createLayerPlot2D(plotDir + "goodFits", kk, 200, -100, 100, 100, 0, 20000);
- createLayerPlot2D(plotDir + "badFits", kk, 200, -100, 100, 100, 0, 20000);
+ createLayerPlot2D(plotDir + "clusterEfficiency2D", kk, 50, -40, 40., 16, 0.5, 16.5);
+ createLayerPlot2D(plotDir + "rthToClusterEfficiency2D", kk, 50, -40, 40., 16, 0.5, 16.5);
+ createLayerPlot2D(plotDir + "allFits", kk, 200, -100, 100, 100, 0, 20000);
+// createLayerPlot2D(plotDir + "toogoodFits", kk, 200, -100, 100, 100, 0, 20000);
+// createLayerPlot2D(plotDir + "goodFits", kk, 200, -100, 100, 100, 0, 20000);
+// createLayerPlot2D(plotDir + "badFits", kk, 200, -100, 100, 100, 0, 20000);
+ createLayerPlot2D(plotDir + "fitT0ChiProb", kk, 200, -100, 100, 100, 0, 1.0);
+ createLayerPlot2D(plotDir + "fitAmpChiProb", kk, 200, 0, 20000, 100, 0, 1.0);
createLayerPlot1D(plotDir + "signalClusterT0", kk, 500, -100, 100);
+ createLayerPlot2D(plotDir + "badClusterFits", kk, 200, -100, 100, 100, 0, 20000);
}
resetEfficiencyMap();
}
@@ -87,11 +94,11 @@
if (!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) {
return;
}
- if (!event.hasCollection(FittedRawTrackerHit.class, fittedTrackerHitCollectionName)) {
+ if (!event.hasCollection(LCRelation.class, fittedTrackerHitCollectionName)) {
return;
}
- if (!event.hasCollection(SiTrackerHitStrip1D.class, siClusterCollectionName)) {
+ if (!event.hasCollection(TrackerHit.class, siClusterCollectionName)) {
return;
}
@@ -125,10 +132,10 @@
List<SimTrackerHit> simHits = event.get(SimTrackerHit.class, trackerHitCollectionName);
// make relational table for strip clusters to mc particle
- List<SiTrackerHitStrip1D> siClusters = event.get(SiTrackerHitStrip1D.class, siClusterCollectionName);
+ List<TrackerHit> siClusters = event.get(TrackerHit.class, siClusterCollectionName);
RelationalTable clustertosimhit = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
- for (SiTrackerHit cluster : siClusters) {
- for (RawTrackerHit rth : cluster.getRawHits()) {
+ for (TrackerHit cluster : siClusters) {
+ for (RawTrackerHit rth : (List<RawTrackerHit>) cluster.getRawHits()) {
Set<SimTrackerHit> simTrackerHits = rawtomc.allFrom(rth);
if (simTrackerHits != null) {
for (SimTrackerHit simhit : simTrackerHits) {
@@ -141,43 +148,63 @@
}
//relational tables from raw and fitted tracker hits to sim hit
+ RelationalTable rthtofit = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE, RelationalTable.Weighting.UNWEIGHTED);
RelationalTable fittomc = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
- List<FittedRawTrackerHit> fittedTrackerHits = event.get(FittedRawTrackerHit.class, fittedTrackerHitCollectionName);
- for (FittedRawTrackerHit hit : fittedTrackerHits) {
- Set<SimTrackerHit> simTrackerHits = rawtomc.allFrom(hit.getRawTrackerHit());
+ List<LCRelation> fittedTrackerHits = event.get(LCRelation.class, fittedTrackerHitCollectionName);
+ for (LCRelation hit : fittedTrackerHits) {
+ GenericObject oldfit = (GenericObject) rthtofit.to(FittedRawTrackerHit.getRawTrackerHit(hit));
+ if (oldfit == null || Math.abs(ShapeFitParameters.getT0(oldfit)) > Math.abs(FittedRawTrackerHit.getT0(hit))) {
+ rthtofit.add(FittedRawTrackerHit.getRawTrackerHit(hit), FittedRawTrackerHit.getShapeFitParameters(hit));
+ }
+ Set<SimTrackerHit> simTrackerHits = rawtomc.allFrom(FittedRawTrackerHit.getRawTrackerHit(hit));
for (SimTrackerHit simhit : simTrackerHits) {
fittomc.add(hit, simhit);
}
}
for (SimTrackerHit simhit : simHits) {
- Set<FittedRawTrackerHit> fittedRTH = fittomc.allTo(simhit);
- FittedRawTrackerHit signalHit = null;
- for (FittedRawTrackerHit frth : fittedRTH) {
- if (signalHit == null || Math.abs(frth.getT0()) < Math.abs(signalHit.getT0())) {
+ Set<LCRelation> fittedRTH = fittomc.allTo(simhit);
+ LCRelation signalHit = null;
+ for (LCRelation frth : fittedRTH) {
+ if (signalHit == null || Math.abs(FittedRawTrackerHit.getT0(frth)) < Math.abs(FittedRawTrackerHit.getT0(signalHit))) {
signalHit = frth;
}
}
if (signalHit != null) {
// System.out.format("chiprob %f, t0 %f, A %f\n", signalHit.getShapeFitParameters().getChiProb(), signalHit.getT0(), signalHit.getAmp());
- if (signalHit.getShapeFitParameters().getChiProb() > 0.95) {
- getLayerPlot2D(plotDir + "toogoodFits", simhit.getLayer()).fill(signalHit.getT0(), signalHit.getAmp());
- } else if (signalHit.getShapeFitParameters().getChiProb() < 0.05) {
- getLayerPlot2D(plotDir + "badFits", simhit.getLayer()).fill(signalHit.getT0(), signalHit.getAmp());
- } else {
- getLayerPlot2D(plotDir + "goodFits", simhit.getLayer()).fill(signalHit.getT0(), signalHit.getAmp());
- }
+ getLayerPlot2D(plotDir + "allFits", simhit.getLayer()).fill(FittedRawTrackerHit.getT0(signalHit), FittedRawTrackerHit.getAmp(signalHit));
+ getLayerPlot2D(plotDir + "fitT0ChiProb", simhit.getLayer()).fill(FittedRawTrackerHit.getT0(signalHit), ShapeFitParameters.getChiProb(FittedRawTrackerHit.getShapeFitParameters(signalHit)));
+ getLayerPlot2D(plotDir + "fitAmpChiProb", simhit.getLayer()).fill(FittedRawTrackerHit.getAmp(signalHit), ShapeFitParameters.getChiProb(FittedRawTrackerHit.getShapeFitParameters(signalHit)));
+// if (signalHit.getShapeFitParameters().getChiProb() > 0.95) {
+// getLayerPlot2D(plotDir + "toogoodFits", simhit.getLayer()).fill(signalHit.getT0(), signalHit.getAmp());
+// } else if (signalHit.getShapeFitParameters().getChiProb() < 0.05) {
+// getLayerPlot2D(plotDir + "badFits", simhit.getLayer()).fill(signalHit.getT0(), signalHit.getAmp());
+// } else {
+// getLayerPlot2D(plotDir + "goodFits", simhit.getLayer()).fill(signalHit.getT0(), signalHit.getAmp());
+// }
}
int gotCluster = 0;
- Set<SiTrackerHitStrip1D> clusters = clustertosimhit.allTo(simhit);
+ int[] gotClusterAtTime = new int[16];
+ Set<TrackerHit> clusters = clustertosimhit.allTo(simhit);
if (clusters != null) {
- for (SiTrackerHitStrip1D clust : clusters) {
+ for (TrackerHit clust : clusters) {
getLayerPlot1D(plotDir + "signalClusterT0", simhit.getLayer()).fill(clust.getTime());
+ for (int i = 0; i < 16; i++) {
+ if (Math.abs(clust.getTime()) < i + 1) {
+ gotClusterAtTime[i] = 1;
+ }
+ }
if (Math.abs(clust.getTime()) < t0Cut) {
gotCluster = 1;
+ } else {
+ for (RawTrackerHit rth : (List<RawTrackerHit>) clust.getRawHits()) {
+ GenericObject fit = (GenericObject) rthtofit.to(rth);
+ getLayerPlot2D(plotDir + "badClusterFits", simhit.getLayer()).fill(ShapeFitParameters.getT0(fit), ShapeFitParameters.getAmp(fit));
+ }
}
+
}
}
Set<RawTrackerHit> rawhits = rawtomc.allTo(simhit);
@@ -189,11 +216,21 @@
if (gotRawHit == 1) {
getLayerPlot(plotDir + "rthToClusterEfficiency", simhit.getLayer()).fill(y, gotCluster);
}
+ for (int i = 0; i < 16; i++) {
+ getLayerPlot2D(plotDir + "clusterEfficiency2D", simhit.getLayer()).fill(y, i + 1, gotClusterAtTime[i]);
+ if (gotRawHit == 1) {
+ getLayerPlot2D(plotDir + "rthToClusterEfficiency2D", simhit.getLayer()).fill(y, i + 1, gotClusterAtTime[i]);
+ }
+ }
}
}
@Override
public void fillEndOfRunPlots() {
+ for (int kk = 1; kk < 13; kk++) {
+ getMean2D(getLayerPlot2D(plotDir + "clusterEfficiency2D", kk));
+ getMean2D(getLayerPlot2D(plotDir + "rthToClusterEfficiency2D", kk));
+ }
}
@Override
@@ -224,6 +261,33 @@
return aida.profile1D(prefix + "_layer" + layer, nchan, min, max);
}
+ private void getMean2D(IHistogram2D hist2D) {
+ int nx = hist2D.xAxis().bins();
+ int ny = hist2D.yAxis().bins();
+ double[][] means = new double[nx][ny];
+ for (int ix = 0; ix < nx; ix++) {
+ for (int iy = 0; iy < ny; iy++) {
+ means[ix][iy] = hist2D.binHeight(ix, iy) / hist2D.binEntries(ix, iy);
+ }
+ }
+ hist2D.reset();
+ for (int ix = 0; ix < nx; ix++) {
+ for (int iy = 0; iy < ny; iy++) {
+ double x = hist2D.xAxis().binCenter(ix);
+ double y = hist2D.yAxis().binCenter(iy);
+ hist2D.fill(x, y, means[ix][iy]);
+ }
+ }
+ }
+
+ private IProfile2D getLayerProfile2D(String prefix, int layer) {
+ return aida.profile2D(prefix + "_layer" + layer);
+ }
+
+ private IProfile2D createLayerProfile2D(String prefix, int layer, int nx, double minX, double maxX, int ny, double minY, double maxY) {
+ return aida.profile2D(prefix + "_layer" + layer, nx, minX, maxX, ny, minY, maxY);
+ }
+
private IHistogram1D getLayerPlot1D(String prefix, int layer) {
return aida.histogram1D(prefix + "_layer" + layer);
}
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality/TrackMCEfficiency.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality/TrackMCEfficiency.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -8,6 +8,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.hps.analysis.examples.LCIOTrackAnalysis;
import org.hps.analysis.examples.TrackAnalysis;
import org.hps.recon.tracking.FindableTrack;
import org.hps.recon.tracking.FittedRawTrackerHit;
@@ -19,6 +20,7 @@
import org.lcsim.event.RelationalTable;
import org.lcsim.event.SimTrackerHit;
import org.lcsim.event.Track;
+import org.lcsim.event.TrackerHit;
import org.lcsim.event.base.BaseRelationalTable;
import org.lcsim.fit.helicaltrack.HelicalTrackCross;
import org.lcsim.fit.helicaltrack.HelixParamCalculator;
@@ -28,9 +30,11 @@
import org.lcsim.recon.tracking.digitization.sisim.SiTrackerHitStrip1D;
/**
- * DQM driver for the monte carlo track efficiency; makes a bunch of efficiency vs variable plots
- * for all tracks and just electrons from trident/A' event, as well as "findable" tracks
- * use the debugTrackEfficiency flag to print out info regarding individual failed events
+ * DQM driver for the monte carlo track efficiency; makes a bunch of efficiency
+ * vs variable plots for all tracks and just electrons from trident/A' event, as
+ * well as "findable" tracks use the debugTrackEfficiency flag to print out info
+ * regarding individual failed events
+ *
* @author mgraham on Mar 28, 2014
*/
// TODO: Add some quantities for DQM monitoring: e.g. <efficiency>, <eff>_findable
@@ -42,6 +46,8 @@
private String trackerHitCollectionName = "TrackerHits";
private String siClusterCollectionName = "StripClusterer_SiTrackerHitStrip1D";
private String rotatedMCRelationsCollectionName = "RotatedHelicalTrackMCRelations";
+ private final String helicalTrackHitRelationsCollectionName = "HelicalTrackHitRelations";
+ private final String rotatedHelicalTrackHitRelationsCollectionName = "RotatedHelicalTrackHitRelations";
private String trackCollectionName = "MatchedTracks";
private String trackerName = "Tracker";
private Detector detector = null;
@@ -62,7 +68,8 @@
private static final String nameStrip = "Tracker_TestRunModule_";
private List<SiSensor> sensors;
private boolean debugTrackEfficiency = false;
- private String plotDir = "TrackMCEfficiency/";
+ private String plotDir = "TrackMCEfficiency/";
+
public void setHelicalTrackHitCollectionName(String helicalTrackHitCollectionName) {
this.helicalTrackHitCollectionName = helicalTrackHitCollectionName;
}
@@ -82,14 +89,14 @@
aida.tree().cd("/");
IHistogramFactory hf = aida.histogramFactory();
- peffFindable = hf.createProfile1D(plotDir+"Findable Efficiency vs p", "", 20, 0., beamP);
- phieffFindable = hf.createProfile1D(plotDir+"Findable Efficiency vs phi", "", 25, -0.25, 0.25);
- ctheffFindable = hf.createProfile1D(plotDir+"Findable Efficiency vs cos(theta)", "", 25, -0.25, 0.25);
+ peffFindable = hf.createProfile1D(plotDir + "Findable Efficiency vs p", "", 20, 0., beamP);
+ phieffFindable = hf.createProfile1D(plotDir + "Findable Efficiency vs phi", "", 25, -0.25, 0.25);
+ ctheffFindable = hf.createProfile1D(plotDir + "Findable Efficiency vs cos(theta)", "", 25, -0.25, 0.25);
- peffElectrons = hf.createProfile1D(plotDir+"Electrons Efficiency vs p", "", 20, 0., beamP);
- phieffElectrons = hf.createProfile1D(plotDir+"Electrons Efficiency vs phi", "", 25, -0.25, 0.25);
- ctheffElectrons = hf.createProfile1D(plotDir+"Electrons Efficiency vs cos(theta)", "", 25, -0.25, 0.25);
-
+ peffElectrons = hf.createProfile1D(plotDir + "Electrons Efficiency vs p", "", 20, 0., beamP);
+ phieffElectrons = hf.createProfile1D(plotDir + "Electrons Efficiency vs phi", "", 25, -0.25, 0.25);
+ ctheffElectrons = hf.createProfile1D(plotDir + "Electrons Efficiency vs cos(theta)", "", 25, -0.25, 0.25);
+
}
@Override
@@ -98,19 +105,25 @@
aida.tree().cd("/");
//make sure the required collections exist
- if (!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName))
+ if (!event.hasCollection(RawTrackerHit.class, rawTrackerHitCollectionName)) {
return;
- if (!event.hasCollection(FittedRawTrackerHit.class, fittedTrackerHitCollectionName))
+ }
+ if (!event.hasCollection(LCRelation.class, fittedTrackerHitCollectionName)) {
return;
- if (!event.hasCollection(Track.class, trackCollectionName))
+ }
+ if (!event.hasCollection(Track.class, trackCollectionName)) {
return;
- if (!event.hasCollection(LCRelation.class, rotatedMCRelationsCollectionName))
+ }
+ if (!event.hasCollection(LCRelation.class, rotatedMCRelationsCollectionName)) {
return;
- if (!event.hasCollection(SiTrackerHitStrip1D.class, siClusterCollectionName))
+ }
+ if (!event.hasCollection(TrackerHit.class, siClusterCollectionName)) {
return;
+ }
- if (!event.hasCollection(SimTrackerHit.class, trackerHitCollectionName))
+ if (!event.hasCollection(SimTrackerHit.class, trackerHitCollectionName)) {
return;
+ }
//
//get the b-field
Hep3Vector IP = new BasicHep3Vector(0., 0., 1.);
@@ -120,17 +133,20 @@
RelationalTable hittomc = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
List<LCRelation> mcrelations = event.get(LCRelation.class, rotatedMCRelationsCollectionName);
for (LCRelation relation : mcrelations) {
- if (relation != null && relation.getFrom() != null && relation.getTo() != null)
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
hittomc.add(relation.getFrom(), relation.getTo());
+ }
}
+
RelationalTable mcHittomcP = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
// Get the collections of SimTrackerHits
List<List<SimTrackerHit>> simcols = event.get(SimTrackerHit.class);
// Loop over the SimTrackerHits and fill in the relational table
for (List<SimTrackerHit> simlist : simcols) {
for (SimTrackerHit simhit : simlist) {
- if (simhit.getMCParticle() != null)
+ if (simhit.getMCParticle() != null) {
mcHittomcP.add(simhit, simhit.getMCParticle());
+ }
}
}
RelationalTable trktomc = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
@@ -138,47 +154,68 @@
if (event.hasCollection(LCRelation.class, "SVTTrueHitRelations")) {
List<LCRelation> trueHitRelations = event.get(LCRelation.class, "SVTTrueHitRelations");
for (LCRelation relation : trueHitRelations) {
- if (relation != null && relation.getFrom() != null && relation.getTo() != null)
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
rawtomc.add(relation.getFrom(), relation.getTo());
+ }
}
}
// make relational table for strip clusters to mc particle
- List<SiTrackerHitStrip1D> siClusters = event.get(SiTrackerHitStrip1D.class, siClusterCollectionName);
+ List<TrackerHit> siClusters = event.get(TrackerHit.class, siClusterCollectionName);
RelationalTable clustertosimhit = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
- for (SiTrackerHit cluster : siClusters) {
+ for (TrackerHit cluster : siClusters) {
List<RawTrackerHit> rawHits = cluster.getRawHits();
for (RawTrackerHit rth : rawHits) {
Set<SimTrackerHit> simTrackerHits = rawtomc.allFrom(rth);
- if (simTrackerHits != null)
+ if (simTrackerHits != null) {
for (SimTrackerHit simhit : simTrackerHits) {
clustertosimhit.add(cluster, simhit);
}
+ }
}
}
//relational tables from mc particle to raw and fitted tracker hits
RelationalTable fittomc = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
- List<FittedRawTrackerHit> fittedTrackerHits = event.get(FittedRawTrackerHit.class, fittedTrackerHitCollectionName);
- for (FittedRawTrackerHit hit : fittedTrackerHits) {
- RawTrackerHit rth = hit.getRawTrackerHit();
+ List<LCRelation> fittedTrackerHits = event.get(LCRelation.class, fittedTrackerHitCollectionName);
+ for (LCRelation hit : fittedTrackerHits) {
+ RawTrackerHit rth = FittedRawTrackerHit.getRawTrackerHit(hit);
Set<SimTrackerHit> simTrackerHits = rawtomc.allFrom(rth);
- if (simTrackerHits != null)
+ if (simTrackerHits != null) {
for (SimTrackerHit simhit : simTrackerHits) {
- if (simhit.getMCParticle() != null)
+ if (simhit.getMCParticle() != null) {
fittomc.add(hit, simhit.getMCParticle());
+ }
}
+ }
}
+ RelationalTable hittostrip = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
+ List<LCRelation> hitrelations = event.get(LCRelation.class, helicalTrackHitRelationsCollectionName);
+ for (LCRelation relation : hitrelations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ hittostrip.add(relation.getFrom(), relation.getTo());
+ }
+ }
+
+ RelationalTable hittorotated = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE, RelationalTable.Weighting.UNWEIGHTED);
+ List<LCRelation> rotaterelations = event.get(LCRelation.class, rotatedHelicalTrackHitRelationsCollectionName);
+ for (LCRelation relation : rotaterelations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ hittorotated.add(relation.getFrom(), relation.getTo());
+ }
+ }
+
// Instantiate the class that determines if a track is "findable"
FindableTrack findable = new FindableTrack(event);
- List<Track> tracks = event.get(Track.class, trackCollectionName);
+ List<Track> tracks = event.get(Track.class, trackCollectionName);
for (Track trk : tracks) {
- TrackAnalysis tkanal = new TrackAnalysis(trk, hittomc);
+ TrackAnalysis tkanal = new TrackAnalysis(trk, hittomc, rawtomc, hittostrip, hittorotated);
tkanalMap.put(trk, tkanal);
MCParticle mcp = tkanal.getMCParticleNew();
- if (mcp != null)
- // Create a map between the tracks found and the assigned MC particle
+ if (mcp != null) // Create a map between the tracks found and the assigned MC particle
+ {
trktomc.add(trk, tkanal.getMCParticleNew());
+ }
}
// Now loop over all MC Particles
@@ -193,10 +230,10 @@
double pz = mcp.getPZ();
double pt = Math.sqrt(px * px + py * py);
double p = Math.sqrt(pt * pt + pz * pz);
- double cth = pz / p;
+ double cth = py / p;
double theta = 180. * Math.acos(cth) / Math.PI;
double eta = -Math.log(Math.tan(Math.atan2(pt, pz) / 2));
- double phi = Math.atan2(py, px);
+ double phi = Math.atan2(px, pz);
// Find the number of layers hit by this mc particle
// System.out.println("MC pt=" + pt);
int nhits = findable.LayersHit(mcp);
@@ -219,14 +256,17 @@
//it's the A'...let's see if we found both tracks.
List<MCParticle> daughters = mcp.getDaughters();
for (MCParticle d : daughters) {
- if (trktomc.allTo(d).isEmpty())
+ if (trktomc.allTo(d).isEmpty()) {
bothreco = false;
- if (!findable.InnerTrackerIsFindable(d, nlayers - 2))
+ }
+ if (!findable.InnerTrackerIsFindable(d, nlayers - 2)) {
bothfindable = false;
+ }
}
double vtxWgt = 0;
- if (bothreco)
+ if (bothreco) {
vtxWgt = 1.0;
+ }
// VxEff.fill(mcp.getOriginX(), vtxWgt);
// VyEff.fill(mcp.getOriginY(), vtxWgt);
// VzEff.fill(mcp.getOriginZ(), vtxWgt);
@@ -242,13 +282,13 @@
_nchMCP++;
findableTracks++;
double wgt = 0.;
- if (ntrk > 0)
+ if (ntrk > 0) {
wgt = 1.;
+ }
foundTracks += wgt;
peffFindable.fill(p, wgt);
phieffFindable.fill(phi, wgt);
ctheffFindable.fill(cth, wgt);
-
if (wgt == 0) {
Set<SimTrackerHit> mchitlist = mcHittomcP.allTo(mcp);
@@ -256,8 +296,9 @@
Set<FittedRawTrackerHit> fitlist = fittomc.allTo(mcp);
if (debugTrackEfficiency) {
System.out.println("TrackMCEfficiencyMonitoring:: Missed a findable track with MC p = " + p);
- if (!hasHTHInEachLayer(hitlist, fitlist))
+ if (!hasHTHInEachLayer(hitlist, fitlist)) {
System.out.println("This track failed becasue it's missing a helical track hit");
+ }
}
}
@@ -266,13 +307,13 @@
totelectrons++;
// findableelectrons++;
double wgt = 0.;
- if (ntrk > 0)
+ if (ntrk > 0) {
wgt = 1.;
+ }
foundelectrons += wgt;
peffElectrons.fill(p, wgt);
phieffElectrons.fill(phi, wgt);
ctheffElectrons.fill(cth, wgt);
-
// }
}
@@ -300,8 +341,9 @@
for (int layer = 1; layer < nlayers - 2; layer += 2) {
boolean hasThisLayer = false;
for (HelicalTrackCross hit : list) {
- if (hit.Layer() == layer)
+ if (hit.Layer() == layer) {
hasThisLayer = true;
+ }
}
if (!hasThisLayer) {
System.out.println("Missing reconstructed hit in layer = " + layer);
@@ -324,10 +366,12 @@
}
}
- if (!hasFitHitSL1)
+ if (!hasFitHitSL1) {
System.out.println("MISSING a hit in SL1!!!");
- if (!hasFitHitSL2)
+ }
+ if (!hasFitHitSL2) {
System.out.println("MISSING a hit in SL2!!!");
+ }
return false;
}
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality/TrackingMonitoring.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/dataquality/TrackingMonitoring.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,18 +1,21 @@
package org.hps.analysis.dataquality;
import hep.aida.IHistogram1D;
+import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.lcsim.event.EventHeader;
-import org.lcsim.event.LCIOParameters;
+import org.lcsim.event.LCRelation;
+import org.lcsim.event.RelationalTable;
import org.lcsim.event.Track;
+import org.lcsim.event.TrackerHit;
+import org.lcsim.event.base.BaseRelationalTable;
import org.lcsim.geometry.Detector;
import org.lcsim.geometry.IDDecoder;
/**
- * DQM driver for reconstructed track quantities
- * plots things like number of tracks/event, chi^2, track parameters
- * (d0/z0/theta/phi/curvature)
+ * DQM driver for reconstructed track quantities plots things like number of
+ * tracks/event, chi^2, track parameters (d0/z0/theta/phi/curvature)
*
* @author mgraham on Mar 28, 2014
*/
@@ -20,10 +23,11 @@
public class TrackingMonitoring extends DataQualityMonitor {
private String helicalTrackHitCollectionName = "HelicalTrackHits";
- private String rotatedTrackHitCollectionName = "RotatedHelicalTrackHits";
- private String helicalTrackHitRelationsCollectionName = "HelicalTrackHitRelations";
+ private final String rotatedTrackHitCollectionName = "RotatedHelicalTrackHits";
+ private final String helicalTrackHitRelationsCollectionName = "HelicalTrackHitRelations";
+ private final String rotatedHelicalTrackHitRelationsCollectionName = "RotatedHelicalTrackHitRelations";
private String trackCollectionName = "MatchedTracks";
- private String trackerName = "Tracker";
+ private final String trackerName = "Tracker";
String ecalSubdetectorName = "Ecal";
String ecalCollectionName = "EcalClusters";
private Detector detector = null;
@@ -35,7 +39,7 @@
double sumz0 = 0;
double sumslope = 0;
double sumchisq = 0;
- private String plotDir = "Tracks/";
+ private final String plotDir = "Tracks/";
String[] trackingQuantNames = {"avg_N_tracks", "avg_N_hitsPerTrack", "avg_d0", "avg_z0", "avg_absslope", "avg_chi2"};
public void setHelicalTrackHitCollectionName(String helicalTrackHitCollectionName) {
@@ -51,14 +55,16 @@
this.detector = detector;
aida.tree().cd("/");
- IHistogram1D trkChi2 = aida.histogram1D(plotDir+"Track Chi2", 25, 0, 25.0);
- IHistogram1D nTracks = aida.histogram1D(plotDir+"Tracks per Event", 6, 0, 6);
- IHistogram1D trkd0 = aida.histogram1D(plotDir+"d0 ", 25, -5.0, 5.0);
- IHistogram1D trkphi = aida.histogram1D(plotDir+"sinphi ", 25, -0.2, 0.2);
- IHistogram1D trkomega = aida.histogram1D(plotDir+"omega ", 25, -0.00025, 0.00025);
- IHistogram1D trklam = aida.histogram1D(plotDir+"tan(lambda) ", 25, -0.1, 0.1);
- IHistogram1D trkz0 = aida.histogram1D(plotDir+"z0 ", 25, -1.0, 1.0);
- IHistogram1D nHits = aida.histogram1D(plotDir+"Hits per Track", 2, 5, 7);
+ IHistogram1D trkChi2 = aida.histogram1D(plotDir + "Track Chi2", 25, 0, 25.0);
+ IHistogram1D nTracks = aida.histogram1D(plotDir + "Tracks per Event", 6, 0, 6);
+ IHistogram1D trkd0 = aida.histogram1D(plotDir + "d0 ", 25, -5.0, 5.0);
+ IHistogram1D trkphi = aida.histogram1D(plotDir + "sinphi ", 25, -0.2, 0.2);
+ IHistogram1D trkomega = aida.histogram1D(plotDir + "omega ", 25, -0.00025, 0.00025);
+ IHistogram1D trklam = aida.histogram1D(plotDir + "tan(lambda) ", 25, -0.1, 0.1);
+ IHistogram1D trkz0 = aida.histogram1D(plotDir + "z0 ", 25, -1.0, 1.0);
+ IHistogram1D nHits = aida.histogram1D(plotDir + "Hits per Track", 2, 5, 7);
+ IHistogram1D trackMeanTime = aida.histogram1D(plotDir + "Mean time of hits on track", 200, -20., 20.);
+ IHistogram1D trackRMSTime = aida.histogram1D(plotDir + "RMS time of hits on track", 200, 0., 10.);
}
@@ -66,29 +72,64 @@
public void process(EventHeader event) {
aida.tree().cd("/");
-
+
+ RelationalTable hittostrip = new BaseRelationalTable(RelationalTable.Mode.MANY_TO_MANY, RelationalTable.Weighting.UNWEIGHTED);
+ List<LCRelation> hitrelations = event.get(LCRelation.class, helicalTrackHitRelationsCollectionName);
+ for (LCRelation relation : hitrelations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ hittostrip.add(relation.getFrom(), relation.getTo());
+ }
+ }
+
+ RelationalTable hittorotated = new BaseRelationalTable(RelationalTable.Mode.ONE_TO_ONE, RelationalTable.Weighting.UNWEIGHTED);
+ List<LCRelation> rotaterelations = event.get(LCRelation.class, rotatedHelicalTrackHitRelationsCollectionName);
+ for (LCRelation relation : rotaterelations) {
+ if (relation != null && relation.getFrom() != null && relation.getTo() != null) {
+ hittorotated.add(relation.getFrom(), relation.getTo());
+ }
+ }
+
if (!event.hasCollection(Track.class, trackCollectionName)) {
- aida.histogram1D(plotDir+"Tracks per Event").fill(0);
+ aida.histogram1D(plotDir + "Tracks per Event").fill(0);
return;
}
nEvents++;
List<Track> tracks = event.get(Track.class, trackCollectionName);
nTotTracks += tracks.size();
- aida.histogram1D(plotDir+"Tracks per Event").fill(tracks.size());
+ aida.histogram1D(plotDir + "Tracks per Event").fill(tracks.size());
for (Track trk : tracks) {
nTotHits += trk.getTrackerHits().size();
- aida.histogram1D(plotDir+"Track Chi2").fill(trk.getChi2());
- aida.histogram1D(plotDir+"Hits per Track").fill(trk.getTrackerHits().size());
- //why is getTrackParameter depricated? How am I supposed to get this?
- aida.histogram1D(plotDir+"d0 ").fill(trk.getTrackParameter(LCIOParameters.ParameterName.d0.ordinal()));
- aida.histogram1D(plotDir+"sinphi ").fill(Math.sin(trk.getTrackParameter(LCIOParameters.ParameterName.phi0.ordinal())));
- aida.histogram1D(plotDir+"omega ").fill(trk.getTrackParameter(LCIOParameters.ParameterName.omega.ordinal()));
- aida.histogram1D(plotDir+"tan(lambda) ").fill(trk.getTrackParameter(LCIOParameters.ParameterName.tanLambda.ordinal()));
- aida.histogram1D(plotDir+"z0 ").fill(trk.getTrackParameter(LCIOParameters.ParameterName.z0.ordinal()));
- sumd0 += trk.getTrackParameter(LCIOParameters.ParameterName.d0.ordinal());
- sumz0 += trk.getTrackParameter(LCIOParameters.ParameterName.z0.ordinal());
- sumslope += Math.abs(trk.getTrackParameter(LCIOParameters.ParameterName.tanLambda.ordinal()));
+ aida.histogram1D(plotDir + "Track Chi2").fill(trk.getChi2());
+ aida.histogram1D(plotDir + "Hits per Track").fill(trk.getTrackerHits().size());
+ aida.histogram1D(plotDir + "d0 ").fill(trk.getTrackStates().get(0).getD0());
+ aida.histogram1D(plotDir + "sinphi ").fill(Math.sin(trk.getTrackStates().get(0).getPhi()));
+ aida.histogram1D(plotDir + "omega ").fill(trk.getTrackStates().get(0).getOmega());
+ aida.histogram1D(plotDir + "tan(lambda) ").fill(trk.getTrackStates().get(0).getTanLambda());
+ aida.histogram1D(plotDir + "z0 ").fill(trk.getTrackStates().get(0).getZ0());
+ sumd0 += trk.getTrackStates().get(0).getD0();
+ sumz0 += trk.getTrackStates().get(0).getZ0();
+ sumslope += Math.abs(trk.getTrackStates().get(0).getTanLambda());
sumchisq += trk.getChi2();
+
+ int nStrips = 0;
+ double meanTime = 0;
+ double rmsTime = 0;
+ for (TrackerHit hit : trk.getTrackerHits()) {
+// System.out.format("cross time: %f\n", hit.getTime());
+ Collection<TrackerHit> htsList = hittostrip.allFrom(hittorotated.from(hit));
+ for (TrackerHit hts : htsList) {
+ nStrips++;
+ meanTime += hts.getTime();
+ rmsTime += hts.getTime() * hts.getTime();
+// System.out.format("strip time: %f\n", hts.getTime());
+
+ }
+ }
+ meanTime /= nStrips;
+ rmsTime = Math.sqrt(rmsTime / nStrips);
+ aida.histogram1D(plotDir + "Mean time of hits on track").fill(meanTime);
+ aida.histogram1D(plotDir + "RMS time of hits on track").fill(rmsTime);
+// System.out.format("%d strips, mean time %f, RMS time %f\n", nStrips, meanTime, rmsTime);
}
}
@@ -102,20 +143,19 @@
monitoredQuantityMap.put(trackingQuantNames[5], sumchisq / nTotTracks);
}
-
@Override
public void printDQMData() {
System.out.println("ReconMonitoring::printDQMData");
- for (Map.Entry<String, Double> entry : monitoredQuantityMap.entrySet())
+ for (Map.Entry<String, Double> entry : monitoredQuantityMap.entrySet()) {
System.out.println(entry.getKey() + " = " + entry.getValue());
+ }
System.out.println("*******************************");
}
@Override
public void printDQMStrings() {
- for (Map.Entry<String, Double> entry : monitoredQuantityMap.entrySet())
- System.out.println("ALTER TABLE dqm ADD "+entry.getKey()+" double;");
-
+ for (Map.Entry<String, Double> entry : monitoredQuantityMap.entrySet()) {
+ System.out.println("ALTER TABLE dqm ADD " + entry.getKey() + " double;");
+ }
}
-
}
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/ECalCellIDPrintDriver.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/ECalCellIDPrintDriver.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,4 +1,4 @@
-package org.hps.monitoring.drivers.ecal;
+package org.hps.analysis.ecal;
import java.io.IOException;
import java.io.PrintWriter;
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalClusterPlots.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalClusterPlots.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,4 +1,4 @@
-package org.hps.monitoring.drivers.ecal;
+package org.hps.analysis.ecal;
import hep.aida.IHistogram1D;
import hep.aida.IHistogram2D;
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalDaqPlots.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalDaqPlots.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,4 +1,4 @@
-package org.hps.monitoring.drivers.ecal;
+package org.hps.analysis.ecal;
import hep.aida.IHistogram1D;
import hep.aida.IPlotter;
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalEventMonitor.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalEventMonitor.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,4 +1,4 @@
-package org.hps.monitoring.drivers.ecal;
+package org.hps.analysis.ecal;
import hep.aida.IHistogram2D;
import hep.aida.IPlotter;
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalEvsX.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalEvsX.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,4 +1,4 @@
-package org.hps.monitoring.drivers.ecal;
+package org.hps.analysis.ecal;
import hep.aida.IHistogram1D;
import hep.aida.IHistogram2D;
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalHitPlots.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalHitPlots.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,4 +1,4 @@
-package org.hps.monitoring.drivers.ecal;
+package org.hps.analysis.ecal;
import hep.aida.IHistogram1D;
import hep.aida.IHistogram2D;
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalMonitoringPlots.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/ecal/EcalMonitoringPlots.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,4 +1,4 @@
-package org.hps.monitoring.drivers.ecal;
+package org.hps.analysis.ecal;
import hep.aida.IHistogram2D;
import hep.aida.IPlotter;
java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/examples
--- java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/examples/TrackAnalysis.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/analysis/src/main/java/org/hps/analysis/examples/TrackAnalysis.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -6,18 +6,32 @@
*/
package org.hps.analysis.examples;
+import hep.physics.matrix.BasicMatrix;
+import hep.physics.matrix.SymmetricMatrix;
+import hep.physics.vec.BasicHep3Matrix;
import hep.physics.vec.BasicHep3Vector;
import hep.physics.vec.Hep3Vector;
import hep.physics.vec.VecOp;
-
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
-
+import java.util.TreeMap;
+import org.apache.commons.math3.linear.Array2DRowRealMatrix;
+import org.apache.commons.math3.linear.ArrayRealVector;
+import org.apache.commons.math3.linear.EigenDecomposition;
+import org.apache.commons.math3.linear.RealMatrix;
+import org.hps.conditions.deprecated.SvtUtils;
+import static org.hps.recon.tracking.CoordinateTransformations.transformVectorToTracking;
+import org.hps.recon.tracking.TrackerHitUtils;
+import org.lcsim.detector.identifier.IIdentifier;
+import org.lcsim.detector.identifier.Identifier;
import org.lcsim.event.MCParticle;
+import org.lcsim.event.RawTrackerHit;
import org.lcsim.event.RelationalTable;
+import org.lcsim.event.SimTrackerHit;
import org.lcsim.event.Track;
import org.lcsim.event.TrackerHit;
import org.lcsim.fit.helicaltrack.HelicalTrack2DHit;
@@ -32,8 +46,11 @@
public class TrackAnalysis {
private enum HelixPar {
+
Curvature, Phi0, DCA, Z0, Slope
};
+ private static final Hep3Vector axial = new BasicHep3Vector(0, 1, 0);
+
private MCParticle _mcp = null;
private int _nhits;
private int _nbadhits;
@@ -47,25 +64,35 @@
private int _nbadAxialhits;
private int _nbadZhits;
private boolean _hasLayerOne;
- List<Integer> badHitList = new ArrayList();
- List<Integer> sharedHitList = new ArrayList();
- List<Integer> trackLayerList = new ArrayList();
- Map<MCParticle, HelicalTrackCross> badhits = new HashMap<MCParticle, HelicalTrackCross>();
- private int[] _nMCHitsPerLayer={0,0,0,0,0,0,0,0,0,0,0,0};
- private int[] _nStripHitsPerLayer={0,0,0,0,0,0,0,0,0,0,0,0};
- Map<Integer, Hep3Vector> _hitLocationPerLayer = new HashMap<Integer,Hep3Vector>();
+ private List<Integer> badHitList = new ArrayList();
+ private List<Integer> sharedHitList = new ArrayList();
+ private List<Integer> trackLayerList = new ArrayList();
+ private Map<MCParticle, HelicalTrackCross> badhits = new HashMap<MCParticle, HelicalTrackCross>();
+ // Create a map containing the number of hits for each MCParticle associated with the track
+ private Map<MCParticle, Integer> mcmap = new HashMap<MCParticle, Integer>();
+ private Map<MCParticle, Integer> mcmapAll = new HashMap<MCParticle, Integer>();
+ private Map<MCParticle, Integer> mcmapAxial = new HashMap<MCParticle, Integer>();
+ private Map<MCParticle, Integer> mcmapZ = new HashMap<MCParticle, Integer>();
+ private int[] _nMCHitsPerLayer = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
+ private int[] _nStripHitsPerLayer = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
+ private Map<Integer, Hep3Vector> _hitLocationPerLayer = new HashMap<Integer, Hep3Vector>();
- /** Creates a new instance of TrackAnalysis */
+ /**
+ * Creates a new instance of TrackAnalysis
+ */
+ public TrackAnalysis(Track trk, RelationalTable hittomc, RelationalTable rthtosimhit, RelationalTable hittostrip, RelationalTable hittorotated) {
+ doAnalysis(trk, hittomc, rthtosimhit, hittostrip, hittorotated);
+ }
+
public TrackAnalysis(Track trk, RelationalTable hittomc) {
+ doAnalysis(trk, hittomc, null, null, null);
+ }
+ private void doAnalysis(Track trk, RelationalTable hittomc, RelationalTable rthtosimhit, RelationalTable hittostrip, RelationalTable hittorotated) {
+
// Get the number of hits on the track
_nhits = trk.getTrackerHits().size();
- // Create a map containing the number of hits for each MCParticle associated with the track
- Map<MCParticle, Integer> mcmap = new HashMap<MCParticle, Integer>();
- Map<MCParticle, Integer> mcmapAll = new HashMap<MCParticle, Integer>();
- Map<MCParticle, Integer> mcmapAxial = new HashMap<MCParticle, Integer>();
- Map<MCParticle, Integer> mcmapZ = new HashMap<MCParticle, Integer>();
_hasLayerOne = false;
// Loop over the hits on the track and make sure we have HelicalTrackHits (which contain the MC particle)
for (TrackerHit hit : trk.getTrackerHits()) {
@@ -73,79 +100,24 @@
Set<MCParticle> mclist = hittomc.allFrom(hit);
for (MCParticle mcp : mclist) {
Integer mchits = 0;
- if (mcmap.containsKey(mcp))
+ if (mcmap.containsKey(mcp)) {
mchits = mcmap.get(mcp);
+ }
mchits++;
mcmap.put(mcp, mchits);
}
- BasicHep3Vector axial = new BasicHep3Vector();
- axial.setV(0, 1, 0);
- HelicalTrackHit htc = (HelicalTrackHit) hit;
+// HelicalTrackHit htc = (HelicalTrackHit) hit;
if (hit instanceof HelicalTrackCross) {
- HelicalTrackCross cross = (HelicalTrackCross) hit;
- List<HelicalTrackStrip> clusterlist = cross.getStrips();
-
- for (HelicalTrackStrip cl : clusterlist) {
- int layer = cl.layer();
- if (layer == 1) _hasLayerOne = true;
-
- _nStripHitsPerLayer[layer - 1] = cl.rawhits().size();
- _hitLocationPerLayer.put(layer,clusterPosition(cl));
- _nhitsNew++;
- double axdotu = VecOp.dot(cl.u(), axial);
- boolean isAxial = false;
- if (axdotu > 0.5) {
- isAxial = true;
- _nAxialhits++;
- } else _nZhits++;
- List<MCParticle> mcPartList = cl.MCParticles();
- _nMCHitsPerLayer[layer-1] = mcPartList.size();
- for (MCParticle mcp : mcPartList) {
- Integer mchits = 0;
- if (mcmapAll.containsKey(mcp))
- mchits = mcmapAll.get(mcp);
- mchits++;
- mcmapAll.put(mcp, mchits);
- if (isAxial) {
- Integer mchitsAxial = 0;
- if (mcmapAxial.containsKey(mcp))
- mchitsAxial = mcmapAxial.get(mcp);
- mchitsAxial++;
- mcmapAxial.put(mcp, mchitsAxial);
- } else {
- Integer mchitsZ = 0;
- if (mcmapZ.containsKey(mcp))
- mchitsZ = mcmapZ.get(mcp);
- mchitsZ++;
- mcmapZ.put(mcp, mchitsZ);
- }
- }
- }
+ countHit((HelicalTrackCross) hit);
+ } else if (hit instanceof HelicalTrack2DHit) {
+ countHit((HelicalTrack2DHit) hit);
} else {
- _nhitsNew++;
- _nAxialhits++;
- HelicalTrack2DHit hit2d = (HelicalTrack2DHit) hit;
- List<MCParticle> mcPartList = hit2d.getMCParticles();
- //assume that lone hits are all axial
- boolean isAxial = true;
- for (MCParticle mcp : mcPartList) {
- Integer mchits = 0;
- if (mcmapAll.containsKey(mcp))
- mchits = mcmapAll.get(mcp);
- mchits++;
- mcmapAll.put(mcp, mchits);
- Integer mchitsAxial = 0;
- if (mcmapAxial.containsKey(mcp))
- mchitsAxial = mcmapAxial.get(mcp);
- mchitsAxial++;
- mcmapAxial.put(mcp, mchitsAxial);
- }
+ countHit(hit, rthtosimhit, hittostrip, hittorotated);
}
}
// Find the MCParticle that has the most hits on the track
-
int nbest = 0;
MCParticle mcbest = null;
for (MCParticle mcp : mcmap.keySet()) {
@@ -156,12 +128,12 @@
}
}
- if (nbest > 0)
+ if (nbest > 0) {
_mcp = mcbest;
+ }
_purity = (double) nbest / (double) _nhits;
_nbadhits = _nhits - nbest;
-
//single strip layer accounting.
int nbestAll = 0;
MCParticle mcbestAll = null;
@@ -173,42 +145,202 @@
}
}
- if (nbestAll > 0)
+ if (nbestAll > 0) {
_mcpNew = mcbestAll;
+ }
_purityNew = (double) nbestAll / (double) _nhitsNew;
_nbadhitsNew = _nhitsNew - nbestAll;
for (TrackerHit hit : trk.getTrackerHits()) {
- HelicalTrackHit htc = (HelicalTrackHit) hit;
if (hit instanceof HelicalTrackCross) {
- HelicalTrackCross cross = (HelicalTrackCross) hit;
- List<HelicalTrackStrip> clusterlist = cross.getStrips();
- for (HelicalTrackStrip cl : clusterlist){
- trackLayerList.add(cl.layer());
- if (!(cl.MCParticles().contains(_mcpNew))) {
- badHitList.add(cl.layer());
- badhits.put(_mcpNew, cross);
+ checkForBadHit((HelicalTrackCross) hit);
+ }
+ }
+
+ if (_nAxialhits > 0) {
+ if (mcmapAxial.containsKey(_mcpNew)) {
+ _nbadAxialhits = _nAxialhits - mcmapAxial.get(_mcpNew);
+ } else {
+ _nbadAxialhits = _nAxialhits;
+ }
+ }
+ if (_nZhits > 0) {
+ if (mcmapZ.containsKey(_mcpNew)) {
+ _nbadZhits = _nZhits - mcmapZ.get(_mcpNew);
+ } else {
+ _nbadZhits = _nZhits;
+ }
+ }
+ }
+
+ private void countHit(HelicalTrackCross cross) {
+ List<HelicalTrackStrip> clusterlist = cross.getStrips();
+
+ for (HelicalTrackStrip cl : clusterlist) {
+ int layer = cl.layer();
+ if (layer == 1) {
+ _hasLayerOne = true;
+ }
+
+ _nStripHitsPerLayer[layer - 1] = cl.rawhits().size();
+ _hitLocationPerLayer.put(layer, clusterPosition(cl));
+ _nhitsNew++;
+ double axdotu = VecOp.dot(cl.u(), axial);
+// System.out.println(new BasicHep3Vector(cross.getPosition()).toString() + cl.u());
+ boolean isAxial = false;
+ if (axdotu > 0.5) {
+ isAxial = true;
+ _nAxialhits++;
+ } else {
+ _nZhits++;
+ }
+ List<MCParticle> mcPartList = cl.MCParticles();
+ _nMCHitsPerLayer[layer - 1] = mcPartList.size();
+ for (MCParticle mcp : mcPartList) {
+ Integer mchits = 0;
+ if (mcmapAll.containsKey(mcp)) {
+ mchits = mcmapAll.get(mcp);
+ }
+ mchits++;
+ mcmapAll.put(mcp, mchits);
+ if (isAxial) {
+ Integer mchitsAxial = 0;
+ if (mcmapAxial.containsKey(mcp)) {
+ mchitsAxial = mcmapAxial.get(mcp);
}
- if(cl.MCParticles().size()>1)
- sharedHitList.add(cl.layer());
+ mchitsAxial++;
+ mcmapAxial.put(mcp, mchitsAxial);
+ } else {
+ Integer mchitsZ = 0;
+ if (mcmapZ.containsKey(mcp)) {
+ mchitsZ = mcmapZ.get(mcp);
+ }
+ mchitsZ++;
+ mcmapZ.put(mcp, mchitsZ);
}
}
}
+ }
+ private void countHit(TrackerHit hit, RelationalTable rthtosimhit, RelationalTable hittostrip, RelationalTable hittorotated) {
+ TrackerHit unrotatedHit = (TrackerHit) hittorotated.from(hit);
+// System.out.println("ID: " + unrotatedHit.getCellID());
+ Set<TrackerHit> hitlist = hittostrip.allFrom(unrotatedHit);
+// System.out.println("size: " + hitlist.size());
+ for (TrackerHit cl : hitlist) {
+ int layer = -1;
+ int module = -1;
+ List<RawTrackerHit> rawHits = cl.getRawHits();
+// System.out.println("RawHits: " + rawHits.size());
+ for (RawTrackerHit rawHit : rawHits) {
+// System.out.println(rawHit.getCellID());
+ IIdentifier id = new Identifier(rawHit.getCellID());
+ int newLayer = SvtUtils.getInstance().getHelper().getValue(id, "layer");
+ if (layer != -1 && layer != newLayer) {
+ System.out.format("TrackerHit has hits from multiple layers: %d and %d\n", layer, newLayer);
+ }
+ layer = newLayer;
+ int newModule = SvtUtils.getInstance().getHelper().getValue(id, "module");
+ if (module != -1 && module != newModule) {
+ System.out.format("TrackerHit has hits from multiple modules: %d and %d\n", module, newModule);
+ }
+ module = newModule;
+// System.out.println(SvtUtils.getInstance().getHelper().getValue(id, "strip"));
+ }
+ if (layer == 1) {
+ _hasLayerOne = true;
+ }
+ DiagonalizedCovarianceMatrix covariance = new DiagonalizedCovarianceMatrix(cl);
+ _nStripHitsPerLayer[layer - 1] = cl.getRawHits().size();
+ _hitLocationPerLayer.put(layer, new BasicHep3Vector(hit.getPosition()));
+ _nhitsNew++;
- if (_nAxialhits > 0)
- if (mcmapAxial.containsKey(_mcpNew))
- _nbadAxialhits = _nAxialhits - mcmapAxial.get(_mcpNew);
- else _nbadAxialhits = _nAxialhits;
- if (_nZhits > 0)
- if (mcmapZ.containsKey(_mcpNew))
- _nbadZhits = _nZhits - mcmapZ.get(_mcpNew);
- else _nbadZhits = _nZhits;
+ double axdotu = VecOp.dot(transformVectorToTracking(covariance.getMeasuredVector()), axial);
+// System.out.println(transformVectorToTracking(new BasicHep3Vector(cl.getPosition())).toString() + transformVectorToTracking(covariance.getMeasuredVector()));
+ boolean isAxial = false;
+ if (axdotu > 0.5) {
+ isAxial = true;
+ _nAxialhits++;
+ } else {
+ _nZhits++;
+ }
+ // get the set of MCParticles associated with this hit and update the hit count for each MCParticle
+ Set<MCParticle> mcPartList = new HashSet<MCParticle>();
+ for (RawTrackerHit rawHit : rawHits) {
+ Set<SimTrackerHit> simhits = (Set<SimTrackerHit>) rthtosimhit.allFrom(rawHit);
+ for (SimTrackerHit simhit : simhits) {
+ if (simhit != null && simhit.getMCParticle() != null) {
+ mcPartList.add(simhit.getMCParticle());
+ }
+ }
+ }
+// System.out.println("MCParticle count: " + mcPartList.size());
+ _nMCHitsPerLayer[layer - 1] = mcPartList.size();
+ for (MCParticle mcp : mcPartList) {
+ Integer mchits = 0;
+ if (mcmapAll.containsKey(mcp)) {
+ mchits = mcmapAll.get(mcp);
+ }
+ mchits++;
+ mcmapAll.put(mcp, mchits);
+ if (isAxial) {
+ Integer mchitsAxial = 0;
+ if (mcmapAxial.containsKey(mcp)) {
+ mchitsAxial = mcmapAxial.get(mcp);
+ }
+ mchitsAxial++;
+ mcmapAxial.put(mcp, mchitsAxial);
+ } else {
+ Integer mchitsZ = 0;
+ if (mcmapZ.containsKey(mcp)) {
+ mchitsZ = mcmapZ.get(mcp);
+ }
+ mchitsZ++;
+ mcmapZ.put(mcp, mchitsZ);
+ }
+ }
+ }
}
- public Hep3Vector clusterPosition(HelicalTrackStrip cl) {
+ private void countHit(HelicalTrack2DHit hit2d) {
+ _nhitsNew++;
+ _nAxialhits++;
+ List<MCParticle> mcPartList = hit2d.getMCParticles();
+ //assume that lone hits are all axial
+ boolean isAxial = true;
+ for (MCParticle mcp : mcPartList) {
+ Integer mchits = 0;
+ if (mcmapAll.containsKey(mcp)) {
+ mchits = mcmapAll.get(mcp);
+ }
+ mchits++;
+ mcmapAll.put(mcp, mchits);
+ Integer mchitsAxial = 0;
+ if (mcmapAxial.containsKey(mcp)) {
+ mchitsAxial = mcmapAxial.get(mcp);
+ }
+ mchitsAxial++;
+ mcmapAxial.put(mcp, mchitsAxial);
+ }
+ }
+
+ private void checkForBadHit(HelicalTrackCross cross) {
+ List<HelicalTrackStrip> clusterlist = cross.getStrips();
+ for (HelicalTrackStrip cl : clusterlist) {
+ trackLayerList.add(cl.layer());
+ if (!(cl.MCParticles().contains(_mcpNew))) {
+ badHitList.add(cl.layer());
+ badhits.put(_mcpNew, cross);
+ }
+ if (cl.MCParticles().size() > 1) {
+ sharedHitList.add(cl.layer());
+ }
+ }
+ }
+
+ public static Hep3Vector clusterPosition(HelicalTrackStrip cl) {
Hep3Vector corigin = cl.origin();
Hep3Vector u = cl.u();
double umeas = cl.umeas();
@@ -284,15 +416,97 @@
public List<Integer> getBadHitList() {
return badHitList;
}
- public List<Integer> getSharedHitList() {
+
+ public List<Integer> getSharedHitList() {
return sharedHitList;
}
-
- public List<Integer> getTrackLayerList() {
+
+ public List<Integer> getTrackLayerList() {
return trackLayerList;
}
public Map<MCParticle, HelicalTrackCross> getBadHits() {
return badhits;
}
+
+ public static class DiagonalizedCovarianceMatrix {
+
+ double[] measurement_errors = new double[3];
+ Hep3Vector[] measurement_vectors = new Hep3Vector[3];
+
+ public DiagonalizedCovarianceMatrix(TrackerHit hit) {
+ SymmetricMatrix cov = new SymmetricMatrix(3, hit.getCovMatrix(), true);
+ RealMatrix covMatrix = new Array2DRowRealMatrix(3, 3);
+ for (int i = 0; i < 3; i++) {
+ for (int j = 0; j < 3; j++) {
+ covMatrix.setEntry(i, j, cov.e(i, j));
+ }
+ }
+ EigenDecomposition decomposed = new EigenDecomposition(covMatrix);
+ BasicHep3Matrix localToGlobal = new BasicHep3Matrix();
+ for (int i = 0; i < 3; i++) {
+ for (int j = 0; j < 3; j++) {
+ localToGlobal.setElement(i, j, decomposed.getV().getEntry(i, j));
+ }
+ }
+// SymmetricMatrix localToGlobal = decomposed.getV().operate(new ArrayRealVector(3))
+ {
+ double eigenvalue = decomposed.getRealEigenvalue(0);
+// Hep3Vector eigenvector = VecOp.mult(localToGlobal, new BasicHep3Vector());
+ Hep3Vector eigenvector = VecOp.mult(Math.signum(eigenvalue), new BasicHep3Vector(decomposed.getVT().getRow(0)));
+ measurement_errors[0] = eigenvalue;
+ measurement_vectors[0] = eigenvector;
+ measurement_errors[2] = eigenvalue;
+ measurement_vectors[2] = eigenvector;
+ }
+ {
+ double eigenvalue = decomposed.getRealEigenvalue(1);
+ Hep3Vector eigenvector = VecOp.mult(Math.signum(eigenvalue), new BasicHep3Vector(decomposed.getVT().getRow(1)));
+ if (eigenvalue > measurement_errors[0]) {
+ measurement_errors[0] = eigenvalue;
+ measurement_vectors[0] = eigenvector;
+ }
+ if (eigenvalue < measurement_errors[2]) {
+ measurement_errors[2] = eigenvalue;
+ measurement_vectors[2] = eigenvector;
+ }
+ }
+ {
+ double eigenvalue = decomposed.getRealEigenvalue(2);
+ Hep3Vector eigenvector = VecOp.mult(Math.signum(eigenvalue), new BasicHep3Vector(decomposed.getVT().getRow(2)));
+ if (eigenvalue > measurement_errors[0]) {
+ measurement_errors[1] = measurement_errors[0];
+ measurement_vectors[1] = measurement_vectors[0];
+ measurement_errors[0] = eigenvalue;
+ measurement_vectors[0] = eigenvector;
+ }
+ if (eigenvalue < measurement_errors[2]) {
+ measurement_errors[1] = measurement_errors[2];
+ measurement_vectors[1] = measurement_vectors[2];
+ measurement_errors[2] = eigenvalue;
+ measurement_vectors[2] = eigenvector;
+ }
+ if (measurement_vectors[1] == null) {
+ measurement_errors[1] = eigenvalue;
+ measurement_vectors[1] = eigenvector;
+ }
+ }
+// for (int i = 0; i < 3; i++) {
+// System.out.format("%d: resolution %f, vector %s\n", i, measurement_errors[i], measurement_vectors[i].toString());
+// }
+ }
+
+ public Hep3Vector getUnmeasuredVector() {
+ return measurement_vectors[0];
+ }
+
+ public Hep3Vector getMeasuredVector() {
+ return measurement_vectors[1];
+ }
+
+ public Hep3Vector getNormalVector() {
+ return measurement_vectors[2];
+ }
+
+ }
}
java/branches/hps-java_HPSJAVA-88/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/pom.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/pom.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -22,14 +22,12 @@
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludes>
- <exclude>org/hps/conditions/**/**.java</exclude>
- <!--
+ <exclude>org/hps/conditions/svt/SvtBadChannelTest.java</exclude>
+ <exclude>org/hps/conditions/svt/SvtGainInsertTest.java</exclude>
+ <exclude>org/hps/conditions/svt/SvtDetectorSetupTest.java</exclude>
+ <exclude>org/hps/conditions/svt/SvtConfigurationTest.java</exclude>
+ <exclude>org/hps/conditions/ConditionsSeriesConverterTest.java</exclude>
<exclude>org/hps/conditions/ConditionsObjectTest.java</exclude>
- <exclude>org/hps/conditions/**.java</exclude>
- <exclude>org/hps/conditions/beam/**.java</exclude>
- <exclude>org/hps/conditions/ecal/**.java</exclude>
- <exclude>org/hps/conditions/svt/**.java</exclude>
- -->
</excludes>
</configuration>
</plugin>
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -4,10 +4,9 @@
import static org.hps.conditions.TableConstants.SVT_CONDITIONS;
import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalConditionsLoader;
+import org.hps.conditions.ecal.EcalDetectorSetup;
import org.hps.conditions.svt.SvtConditions;
-import org.hps.conditions.svt.SvtConditionsLoader;
-import org.lcsim.conditions.ConditionsReader;
+import org.hps.conditions.svt.SvtDetectorSetup;
import org.lcsim.geometry.Detector;
import org.lcsim.util.Driver;
@@ -23,14 +22,16 @@
static DatabaseConditionsManager manager;
// Default conditions system XML config, which is for the Test Run 2012 database.
- String _defaultConfigResource = "/org/hps/conditions/config/conditions_database_testrun_2012.xml";
+ static final String DEFAULT_CONFIG = "/org/hps/conditions/config/conditions_database_testrun_2012.xml";
- // Default database connection parameters, which points to the SLAC development
- // database.
- static String _defaultConnectionResource = "/org/hps/conditions/config/conditions_database_testrun_2012_connection.properties";
+ // Default database connection parameters, which points to the SLAC development database.
+ static final String DEFAULT_CONNECTION = "/org/hps/conditions/config/conditions_database_testrun_2012_connection.properties";
String ecalSubdetectorName = "Ecal";
String svtSubdetectorName = "Tracker";
+
+ boolean loadSvtConditions = true;
+ boolean loadEcalConditions = true;
/**
* Constructor which initializes the conditions manager with default connection
@@ -38,8 +39,8 @@
*/
public ConditionsDriver() {
manager = new DatabaseConditionsManager();
- manager.setConnectionResource(_defaultConnectionResource);
- manager.configure(_defaultConfigResource);
+ manager.setConnectionResource(DEFAULT_CONNECTION);
+ manager.configure(DEFAULT_CONFIG);
manager.register();
}
@@ -58,23 +59,15 @@
public void setConnectionResource(String resource) {
manager.setConnectionResource(resource);
}
-
- /**
- * Set the class of the conditions reader to use.
- */
- public void setConditionsReaderClass(String className) {
- try {
- Object object = Class.forName(className).newInstance();
- ConditionsReader reader = (ConditionsReader) object;
- if (reader != null)
- manager.setBaseConditionsReader(reader);
- else
- throw new IllegalArgumentException("The class " + className + " is not a ConditionsReader.");
- } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
- throw new RuntimeException(e);
- }
+
+ public void setLoadSvtConditions(boolean loadSvtConditions) {
+ this.loadSvtConditions = loadSvtConditions;
}
-
+
+ public void setLoadEcalConditions(boolean loadEcaltConditions) {
+ this.loadEcalConditions = loadSvtConditions;
+ }
+
public void setEcalSubdetectorName(String ecalSubdetectorName) {
this.ecalSubdetectorName = ecalSubdetectorName;
}
@@ -82,16 +75,17 @@
public void setSvtSubdetectorName(String svtSubdetectorName) {
this.svtSubdetectorName = svtSubdetectorName;
}
-
+
/**
* This method updates a new detector with SVT and ECal conditions data.
*/
- // FIXME: Add here a check on the run number and if it is invalid then
- // set a default before trying to load the conditions.
public void detectorChanged(Detector detector) {
- // Load conditions onto the detector.
- loadSvtConditions(detector);
- loadEcalConditions(detector);
+ // Load SVT conditions onto the detector.
+ if (loadSvtConditions)
+ loadSvtConditions(detector);
+ // Load ECAL conditions onto the detector.
+ if (loadEcalConditions)
+ loadEcalConditions(detector);
}
/**
@@ -100,7 +94,7 @@
*/
private void loadSvtConditions(Detector detector) {
SvtConditions conditions = manager.getCachedConditions(SvtConditions.class, SVT_CONDITIONS).getCachedData();
- SvtConditionsLoader loader = new SvtConditionsLoader();
+ SvtDetectorSetup loader = new SvtDetectorSetup();
loader.load(detector, conditions);
}
@@ -110,7 +104,7 @@
*/
private void loadEcalConditions(Detector detector) {
EcalConditions conditions = manager.getCachedConditions(EcalConditions.class, ECAL_CONDITIONS).getCachedData();
- EcalConditionsLoader loader = new EcalConditionsLoader();
+ EcalDetectorSetup loader = new EcalDetectorSetup();
loader.load(detector.getSubdetector(ecalSubdetectorName), conditions);
}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/DatabaseConditionsManager.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/DatabaseConditionsManager.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -53,7 +53,7 @@
protected List<TableMetaData> tableMetaData;
protected List<ConditionsConverter> converters;
protected File connectionPropertiesFile;
- protected ConditionsReader baseReader;
+ //protected ConditionsReader baseReader;
protected static Logger logger = null;
protected ConnectionParameters connectionParameters;
protected Connection connection;
@@ -66,7 +66,7 @@
*/
public DatabaseConditionsManager() {
registerConditionsConverter(new DetectorConditionsConverter());
- baseReader = new BaseClasspathConditionsReader();
+ //baseReader = new BaseClasspathConditionsReader();
setupConnectionFromSystemProperty();
}
@@ -151,7 +151,8 @@
}
// Let the super class do whatever it think it needs to do.
- super.setDetector(detectorName, runNumber);
+ super.setDetector(detectorName, runNumber);
+ //super.setConditionsReader(this.baseReader, detectorName);
}
/**
@@ -159,10 +160,10 @@
* @param detectorName the name of the detector
*/
void setup(String detectorName) {
- if (baseReader instanceof BaseClasspathConditionsReader) {
- ((BaseClasspathConditionsReader) baseReader).setResourcePath(detectorName);
- logger.config("set resource path " + detectorName + " on conditions reader");
- }
+ //if (baseReader instanceof BaseClasspathConditionsReader) {
+ // ((BaseClasspathConditionsReader) baseReader).setResourcePath(detectorName);
+ // logger.config("set resource path " + detectorName + " on conditions reader");
+ //}
if (!isConnected())
openConnection();
else
@@ -241,10 +242,12 @@
* conditions such as the compact.xml file for the detector.
* @param reader The base ConditionsReader.
*/
- public void setBaseConditionsReader(ConditionsReader baseReader) {
- logger.config("setting conditions reader to " + baseReader.getClass().getCanonicalName());
- this.baseReader = baseReader;
- }
+ // FIXME: This doesn't work because the super class has some convoluted logic that always overrides
+ // whatever is set manually as the conditions reader.
+ //public void setBaseConditionsReader(ConditionsReader baseReader) {
+ // logger.config("setting conditions reader to " + baseReader.getClass().getCanonicalName());
+ // this.baseReader = baseReader;
+ //}
/**
* Get the next collection ID for a database conditions table.
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/ConditionsDatabaseConfiguration.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/ConditionsDatabaseConfiguration.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,49 +0,0 @@
-package org.hps.conditions.config;
-
-import org.hps.conditions.DatabaseConditionsManager;
-import org.lcsim.conditions.ConditionsReader;
-
-/**
- * Convenience class for setting up access to the conditions.
- */
-public class ConditionsDatabaseConfiguration {
-
- private String xmlConfig;
- private String connectionProp;
- private ConditionsReader reader;
- private DatabaseConditionsManager manager;
-
- /**
- * Constructor with XML config, connection properties and ConditionsReader.
- */
- public ConditionsDatabaseConfiguration(
- String xmlConfig,
- String connectionProp,
- ConditionsReader reader) {
- this.xmlConfig = xmlConfig;
- this.connectionProp = connectionProp;
- this.reader = reader;
- }
-
- /**
- * Constructor with XML config and connection properties.
- */
- public ConditionsDatabaseConfiguration(
- String xmlConfig,
- String connectionProp) {
- this.xmlConfig = xmlConfig;
- this.connectionProp = connectionProp;
- }
-
- /**
- * Setup the configuration on the conditions manager.
- */
- public void setup() {
- manager = new DatabaseConditionsManager();
- manager.configure(xmlConfig);
- manager.setConnectionResource(connectionProp);
- if (reader != null)
- manager.setBaseConditionsReader(reader);
- manager.register();
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/DefaultTestSetup.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/DefaultTestSetup.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,80 +0,0 @@
-package org.hps.conditions.config;
-
-import org.hps.conditions.DatabaseConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-
-/**
- * <p>
- * This is a static utility class for setting up the conditions system for test cases in
- * this package and sub-packages.
- * </p>
- * <p>
- * It uses the SLAC Test Run 2012 conditions database, with a relative reference to a file
- * containing connection parameters in the hps-conditions module. The XML configuration is
- * read from a classpath resource in the same module.
- * </p>
- * <p>
- * The detector is set to <i>HPS-conditions-test</i>, which is a test detector without
- * real data associated to it. There are a few files used in the test cases that use this
- * detector.
- * </p>
- * <p>
- * The run number is initially set to <i>1351</i> which is one of the "good runs".
- * </p>
- * <p>
- * Full setup can be performed with this method chain: <code>
- * DatabaseConditionsManager manager = new DefaultTestSetup().configure().setup();
- * </code>
- * </p>
- * <p>
- * To only configure the system without setting up detector and run, use the following:
- * <code>
- * new DefaultTestSetup().configure();
- * </code>
- * </p>
- *
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public final class DefaultTestSetup {
-
- // Default conditions manager parameters.
- static String connectionResource = "/org/hps/conditions/config/conditions_database_testrun_2012_connection.properties";
- static String conditionsConfig = "/org/hps/conditions/config/conditions_database_testrun_2012.xml";
-
- // Default test detector and run number for test cases not using real data.
- static String detectorName = "HPS-conditions-test";
- static int runNumber = 1351;
-
- DatabaseConditionsManager conditionsManager;
- boolean wasConfigured = false;
-
- /**
- * Configure and register the {@link DatabaseConditionsManager} with default
- * parameters.
- * @return an instance of this class for chaining (e.g. to call {@link #setup()}.
- */
- public DefaultTestSetup configure() {
- conditionsManager = new DatabaseConditionsManager();
- conditionsManager.setConnectionResource(connectionResource);
- conditionsManager.configure(conditionsConfig);
- conditionsManager.register();
- wasConfigured = true;
- return this;
- }
-
- /**
- * Setup the detector and run number conditions for the conditions manager. This is
- * mostly useful for test cases not using an <code>LCSimLoop</code>.
- * @return the conditions manager
- */
- public DatabaseConditionsManager setup() {
- if (!wasConfigured)
- configure();
- try {
- conditionsManager.setDetector(detectorName, runNumber);
- } catch (ConditionsNotFoundException e) {
- throw new RuntimeException(e);
- }
- return conditionsManager;
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/DetectorSetup.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/DetectorSetup.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,54 +0,0 @@
-package org.hps.conditions.config;
-
-import org.hps.conditions.DatabaseConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-
-/**
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public final class DetectorSetup {
-
- // Default conditions manager parameters.
- static final String connectionResource = "/org/hps/conditions/config/conditions_database_testrun_2012_connection.properties";
- static final String conditionsConfig = "/org/hps/conditions/config/conditions_database_testrun_2012.xml";
-
- String detectorName;
- int runNumber;
- DatabaseConditionsManager conditionsManager;
- boolean wasConfigured = false;
-
- public DetectorSetup(String detectorName, int runNumber) {
- this.detectorName = detectorName;
- this.runNumber = runNumber;
- }
-
- /**
- * Configure and register the {@link DatabaseConditionsManager} with default
- * parameters.
- * @return an instance of this class for chaining (e.g. to call {@link #setup()}.
- */
- public DetectorSetup configure() {
- conditionsManager = new DatabaseConditionsManager();
- conditionsManager.setConnectionResource(connectionResource);
- conditionsManager.configure(conditionsConfig);
- conditionsManager.register();
- wasConfigured = true;
- return this;
- }
-
- /**
- * Setup the detector and run number conditions for the conditions manager. This is
- * mostly useful for test cases not using an <code>LCSimLoop</code>.
- * @return the conditions manager
- */
- public DatabaseConditionsManager setup() {
- if (!wasConfigured)
- configure();
- try {
- conditionsManager.setDetector(detectorName, runNumber);
- } catch (ConditionsNotFoundException e) {
- throw new RuntimeException(e);
- }
- return conditionsManager;
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/DevDatabaseReadOnlyConfig.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/config/DevDatabaseReadOnlyConfig.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,44 +0,0 @@
-package org.hps.conditions.config;
-
-import org.hps.conditions.DatabaseConditionsManager;
-import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
-
-/**
- * Convenience class for setting up access to the conditions dev database.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class DevDatabaseReadOnlyConfig {
-
- private String xmlConfig = "/org/hps/conditions/config/conditions_dev.xml";
- private String connectionProp = "/org/hps/conditions/config/conditions_dev.properties";
- private DatabaseConditionsManager manager;
-
- /**
- * Constructor.
- */
- public DevDatabaseReadOnlyConfig() {
- }
-
- /**
- * Setup the XML config and connection properties on the conditions manager.
- */
- public void setup() {
- manager = new DatabaseConditionsManager();
- manager.configure(xmlConfig);
- manager.setConnectionResource(connectionProp);
- manager.register();
- }
-
- /**
- * Load a specific detector and run number to cache matching conditions.
- * @param detectorName The name of the detector.
- * @param runNumber The run number.
- */
- public void load(String detectorName, int runNumber) {
- try {
- manager.setDetector(detectorName, runNumber);
- } catch (ConditionsNotFoundException e) {
- throw new RuntimeException(e);
- }
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/ecal
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsLoader.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsLoader.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,87 +0,0 @@
-package org.hps.conditions.ecal;
-
-import java.util.List;
-
-import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
-import org.hps.conditions.ecal.EcalChannel.GeometryId;
-import org.lcsim.detector.converter.compact.EcalCrystal;
-import org.lcsim.detector.identifier.IIdentifierHelper;
-import org.lcsim.geometry.Subdetector;
-
-/**
- * Load {@link EcalConditions} data onto <code>EcalCrystal</code> objects.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public final class EcalConditionsLoader {
-
- /**
- * Load ECal conditions data onto a full detector object.
- * @param detector The detector object.
- * @param conditions The conditions object.
- */
- public void load(Subdetector subdetector, EcalConditions conditions) {
-
- // Find EcalCrystal objects.
- List<EcalCrystal> crystals = subdetector.getDetectorElement().findDescendants(EcalCrystal.class);
-
- // Get the ID helper.
- IIdentifierHelper helper = subdetector.getDetectorElement().getIdentifierHelper();
-
- // Get the system ID.
- int system = subdetector.getSystemID();
-
- // Get the full channel map created by the conditions system.
- EcalChannelCollection channelMap = conditions.getChannelCollection();
-
- // Build the map of geometry IDs.
- channelMap.buildGeometryMap(helper, system);
-
- // Loop over crystals.
- for (EcalCrystal crystal : crystals) {
-
- // System.out.println(crystal.getName() + " @ " + crystal.getX() + ", " +
- // crystal.getY());
-
- // Reset in case of existing conditions data.
- crystal.resetConditions();
-
- // Find the corresponding entry in the channel map for this crystal.
- int[] geomValues = new int[] { system, crystal.getX(), crystal.getY() };
- GeometryId geometryId = new GeometryId(helper, geomValues);
- EcalChannel channel = channelMap.findChannel(geometryId);
- if (channel == null) {
- throw new RuntimeException("EcalChannel not found for crystal: " + crystal.getName());
- }
-
- // Set the crate.
- crystal.setCrate(channel.getCrate());
-
- // Set the slot.
- crystal.setSlot(channel.getSlot());
-
- // Set the channel number.
- crystal.setChannel(channel.getChannel());
-
- // Get the channel constants.
- EcalChannelConstants constants = conditions.getChannelConstants(channel);
- if (constants == null) {
- throw new RuntimeException("EcalChannelConstants object not found for crystal: " + crystal.getName());
- }
-
- // Set bad channel.
- crystal.setBadChannel(constants.isBadChannel());
-
- // Set pedestal.
- crystal.setPedestal(constants.getCalibration().getPedestal());
-
- // Set noise.
- crystal.setNoise(constants.getCalibration().getNoise());
-
- // Set gain.
- crystal.setGain(constants.getGain().getGain());
-
- // Set time shift.
- crystal.setTimeShift(constants.getTimeShift().getTimeShift());
- }
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/ChannelConstants.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/ChannelConstants.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -5,13 +5,13 @@
*
* @author Omar Moreno <[log in to unmask]>
* @author Jeremy McCormick <[log in to unmask]>
- * @version $Id: ChannelConstants.java,v 1.5 2013/10/04 01:43:48 jeremy Exp $
+ *
*/
public final class ChannelConstants {
private SvtCalibration calibration = null;
private SvtGain gain = null;
- private SvtPulseParameters pulseParameters = null;
+ private SvtShapeFitParameters shapeFitParameters = null;
private boolean badChannel = false;
/**
@@ -24,8 +24,8 @@
* Set the pulse parameters.
* @param pulseParameters The pulse parameters
*/
- void setPulseParameters(SvtPulseParameters pulseParameters) {
- this.pulseParameters = pulseParameters;
+ void setShapeFitParameters(SvtShapeFitParameters shapeFitParameters) {
+ this.shapeFitParameters = shapeFitParameters;
}
/**
@@ -61,11 +61,11 @@
}
/**
- * Get the pulse parameters.
- * @return The pulse parameters.
+ * Get the shape fit parameters.
+ * @return The shape fit parameters.
*/
- public SvtPulseParameters getPulseParameters() {
- return pulseParameters;
+ public SvtShapeFitParameters getShapeFitParameters() {
+ return shapeFitParameters;
}
/**
@@ -94,7 +94,7 @@
buffer.append(", ");
buffer.append(getGain());
buffer.append(", ");
- buffer.append(getPulseParameters());
+ buffer.append(getShapeFitParameters());
return buffer.toString();
}
}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannel.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtBadChannel.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -11,5 +11,9 @@
public int getChannelId() {
return getFieldValue("svt_channel_id");
}
+
+ public int getNote(){
+ return getFieldValue("notes");
+ }
}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtCalibration.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtCalibration.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -3,12 +3,17 @@
import org.hps.conditions.AbstractConditionsObject;
import org.hps.conditions.ConditionsObjectCollection;
+import static org.hps.conditions.svt.SvtChannel.MAX_NUMBER_OF_SAMPLES;
+
/**
- * This class represents a noise and pedestal measurement for an SVT channel.
+ * This class encapsulates noise and pedestal measurement for an SVT channel.
+ *
* @author Jeremy McCormick <[log in to unmask]>
+ * @author Omar Moreno <[log in to unmask]>
*/
public final class SvtCalibration extends AbstractConditionsObject {
+
public static class SvtCalibrationCollection extends ConditionsObjectCollection<SvtCalibration> {
}
@@ -16,24 +21,30 @@
* Get the channel ID.
* @return The channel ID.
*/
- public int getChannelId() {
- return getFieldValue(Integer.class, "svt_channel_id");
+ public int getChannelID() {
+ return getFieldValue("svt_channel_id");
}
/**
* Get the noise value.
* @return The noise value.
*/
- public double getNoise() {
- return getFieldValue(Double.class, "noise");
+ public double getNoise(int sample) {
+        if(sample < 0 || sample >= MAX_NUMBER_OF_SAMPLES){
+ throw new RuntimeException("Sample number is not within range.");
+ }
+ return getFieldValue(Double.class, "noise_" + Integer.toString(sample));
}
/**
* Get the pedestal value.
* @return The pedestal value.
*/
- public double getPedestal() {
- return getFieldValue(Double.class, "pedestal");
+ public double getPedestal(int sample) {
+        if(sample < 0 || sample >= MAX_NUMBER_OF_SAMPLES){
+ throw new RuntimeException("Sample number is not within range.");
+ }
+ return getFieldValue(Double.class, "pedestal_" + Integer.toString(sample));
}
/**
@@ -41,6 +52,51 @@
* @return This object converted to a string.
*/
public String toString() {
- return "noise: " + getNoise() + ", pedestal: " + getPedestal();
+ StringBuffer buffer = new StringBuffer();
+ buffer.append("Channel ID: " + this.getChannelID());
+ for (int i = 0; i < 115; i++) {
+ buffer.append("-");
+ }
+ buffer.append("Pedestal sample 0:");
+ buffer.append(" ");
+ buffer.append("Pedestal sample 1:");
+ buffer.append(" ");
+ buffer.append("Pedestal sample 2:");
+ buffer.append(" ");
+ buffer.append("Pedestal sample 3:");
+ buffer.append(" ");
+        buffer.append("Pedestal sample 4:");
+ buffer.append(" ");
+ buffer.append("Pedestal sample 5:");
+ buffer.append("\n");
+ for (int i = 0; i < 115; i++) {
+ buffer.append("-");
+ }
+ buffer.append("\n");
+ for(int sample = 0; sample < MAX_NUMBER_OF_SAMPLES; sample++){
+ buffer.append(this.getPedestal(sample));
+ buffer.append(" ");
+ }
+ buffer.append("Noise sample 0:");
+ buffer.append(" ");
+ buffer.append("Noise sample 1:");
+ buffer.append(" ");
+ buffer.append("Noise sample 2:");
+ buffer.append(" ");
+ buffer.append("Noise sample 3:");
+ buffer.append(" ");
+ buffer.append("Noise sample 4:");
+ buffer.append(" ");
+ buffer.append("Noise sample 5:");
+ buffer.append("\n");
+ for (int i = 0; i < 115; i++) {
+ buffer.append("-");
+ }
+ buffer.append("\n");
+ for(int sample = 0; sample < MAX_NUMBER_OF_SAMPLES; sample++){
+ buffer.append(this.getNoise(sample));
+ buffer.append(" ");
+ }
+ return buffer.toString();
}
}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtChannel.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtChannel.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -12,22 +12,26 @@
import org.hps.util.Pair;
/**
- * This class represents SVT channel setup information, including hybrid, FPGA, and
- * channel numbers.
+ * This class represents SVT channel setup information, including FEB ID,
+ * FEB Hybrid ID, and channel numbers.
+ *
* @author Jeremy McCormick <[log in to unmask]>
+ * @author Omar Moreno <[log in to unmask]>
*/
public final class SvtChannel extends AbstractConditionsObject {
- public static class SvtChannelCollection extends ConditionsObjectCollection<SvtChannel> {
+ public static final int MAX_NUMBER_OF_SAMPLES = 6;
+ public static class SvtChannelCollection extends ConditionsObjectCollection<SvtChannel> {
+
Map<Integer, SvtChannel> channelMap = new HashMap<Integer, SvtChannel>();
public void add(SvtChannel channel) {
// Add to map.
- if (channelMap.containsKey(channel.getChannelId())) {
- throw new IllegalArgumentException("Channel ID already exists: " + channel.getChannelId());
+ if (channelMap.containsKey(channel.getChannelID())) {
+ throw new IllegalArgumentException("Channel ID already exists: " + channel.getChannelID());
}
- channelMap.put(channel.getChannelId(), channel);
+ channelMap.put(channel.getChannelID(), channel);
// Add to collection.
try {
@@ -42,16 +46,16 @@
}
/**
- * Find channels that match a DAQ pair (FPGA, hybrid).
+ * Find channels that match a DAQ pair (FEB ID, FEB Hybrid ID).
* @param pair The DAQ pair.
* @return The channels matching the DAQ pair or null if not found.
*/
public Collection<SvtChannel> find(Pair<Integer, Integer> pair) {
List<SvtChannel> channels = new ArrayList<SvtChannel>();
- int fpga = pair.getFirstElement();
- int hybrid = pair.getSecondElement();
+ int febID = pair.getFirstElement();
+ int febHybridID = pair.getSecondElement();
for (SvtChannel channel : this.getObjects()) {
- if (channel.getFpga() == fpga && channel.getHybrid() == hybrid) {
+ if (channel.getFebID() == febID && channel.getFebHybridID() == febHybridID) {
channels.add(channel);
}
}
@@ -75,24 +79,24 @@
* Get the channel ID.
* @return The channel ID.
*/
- public int getChannelId() {
+ public int getChannelID() {
return getFieldValue("channel_id");
}
/**
- * Get the hybrid number.
- * @return The hybrid number.
+ * Get the FEB ID.
+ * @return The FEB ID.
*/
- public int getHybrid() {
- return getFieldValue("hybrid");
+ public int getFebID() {
+ return getFieldValue("feb_id");
}
/**
- * Get the FPGA number.
- * @return The FPGA number.
+ * Get the FEB hybrid ID.
+ * @return The FEB hybrid ID.
*/
- public int getFpga() {
- return getFieldValue("fpga");
+ public int getFebHybridID() {
+ return getFieldValue("feb_hybrid_id");
}
/**
@@ -108,7 +112,7 @@
* @return This object as a string.
*/
public String toString() {
- return "channel_id: " + getChannelId() + ", fpga: " + getFpga() + ", hybrid: " + getHybrid() + ", channel: " + getChannel();
+ return "channel_id: " + getChannelID() + ", feb_id: " + getFebID() + ", feb_hybrid_id: " + getFebHybridID() + ", channel: " + getChannel();
}
/**
@@ -123,6 +127,6 @@
if (o == this)
return true;
SvtChannel channel = (SvtChannel) o;
- return getChannelId() == channel.getChannelId() && getHybrid() == channel.getHybrid() && getFpga() == channel.getFpga() && getHybrid() == channel.getHybrid();
+ return getChannelID() == channel.getChannelID() && getFebID() == channel.getFebID() && getFebHybridID() == channel.getFebHybridID() && getChannel() == channel.getChannel();
}
}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConditions.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConditions.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -5,15 +5,17 @@
import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
-import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
+import org.hps.conditions.svt.SvtT0Shift.SvtT0ShiftCollection;
+import static org.hps.conditions.svt.SvtChannel.MAX_NUMBER_OF_SAMPLES;
+
/**
* This class contains all SVT conditions data by readout channel. {@link SvtChannel}
* objects from the {@linkSvtChannelMap} should be used to lookup the conditions using the
* {@link #getChannelConstants(SvtChannel)} method.
*
* @author Jeremy McCormick <[log in to unmask]>
- * @version $Id: SvtConditions.java,v 1.11 2013/10/15 23:45:56 jeremy Exp $
+ * @author Omar Moreno <[log in to unmask]>
*/
public final class SvtConditions {
@@ -21,10 +23,11 @@
private Map<SvtChannel, ChannelConstants> channelData = new HashMap<SvtChannel, ChannelConstants>();
private SvtChannelCollection channelMap = null;
private SvtDaqMappingCollection daqMap = null;
- private SvtTimeShiftCollection timeShifts = null;
+ private SvtT0ShiftCollection t0Shifts = null;
/**
* Class constructor, which takes a channel map.
+ *
* @param channelMap The SVT channel map.
*/
SvtConditions(SvtChannelCollection channelMap) {
@@ -35,6 +38,7 @@
* Get the conditions constants for a specific channel. These will be created if they
* do not exist for the given channel, BUT only channels in the current channel map
* are allowed as an argument.
+ *
* @param channel The SVT channel.
* @return The conditions constants for the channel.
* @throws IllegalArgumentException if .
@@ -53,6 +57,7 @@
/**
* Get the {@link SvtChannelCollection} for this set of conditions.
+ *
* @return The SVT channel map.
*/
public SvtChannelCollection getChannelMap() {
@@ -61,6 +66,7 @@
/**
* Get the {@link SvtDaqMappingCollection} associated with these conditions.
+ *
* @return The SVT DAQ map.
*/
public SvtDaqMappingCollection getDaqMap() {
@@ -68,15 +74,17 @@
}
/**
- * Get the {@link SvtTimeShiftCollection}.
- * @return The time shifts by sensor.
+ * Get the {@link SvtT0ShiftCollection}.
+ *
+ * @return The t0 shifts by sensor.
*/
- public SvtTimeShiftCollection getTimeShifts() {
- return timeShifts;
+ public SvtT0ShiftCollection getT0Shifts() {
+ return t0Shifts;
}
/**
* Set the {@link SvtDaqMappingCollection} associated with these conditions.
+ *
* @param daqMap The SVT DAQ map.
*/
void setDaqMap(SvtDaqMappingCollection daqMap) {
@@ -84,11 +92,12 @@
}
/**
- * Set the sensor time shifts.
- * @param timeShifts The sensor time shifts collection.
+ * Set the sensor t0 shifts.
+ *
+ * @param t0Shifts The sensor time shifts collection.
*/
- void setTimeShifts(SvtTimeShiftCollection timeShifts) {
- this.timeShifts = timeShifts;
+ void setTimeShifts(SvtT0ShiftCollection t0Shifts) {
+ this.t0Shifts = t0Shifts;
}
/**
@@ -96,7 +105,10 @@
* table of channel data independently of how its member objects implement their
* string conversion method. For now, it does not print the time shifts by sensor as
* all other information is by channel.
+ *
* @return This object converted to a string, without the DAQ map.
+ * TODO: Make this look more human readable. At the moment, reading this
+ * requires a huge terminal window.
*/
public String toString() {
StringBuffer buff = new StringBuffer();
@@ -107,31 +119,49 @@
buff.append('\n');
// Table header:
- buff.append("id");
+ buff.append("Channel ID");
buff.append(" ");
- buff.append("fpga");
+ buff.append("FEB ID");
buff.append(" ");
- buff.append("hybrid");
+ buff.append("FEB Hybrid ID");
buff.append(" ");
- buff.append("channel");
+ buff.append("Channel");
buff.append(" ");
- buff.append("noise");
+ buff.append("Pedestal sample 0");
buff.append(" ");
- buff.append("pedestal");
- buff.append(" ");
- buff.append("gain");
+ buff.append("Pedestal sample 1");
+ buff.append(" ");
+ buff.append("Pedestal sample 2");
+ buff.append(" ");
+ buff.append("Pedestal sample 3");
+ buff.append(" ");
+ buff.append("Pedestal sample 4");
+ buff.append(" ");
+ buff.append("Pedestal sample 5");
+ buff.append(" ");
+ buff.append("Noise sample 0");
+ buff.append(" ");
+ buff.append("Noise sample 1");
+ buff.append(" ");
+ buff.append("Noise sample 2");
+ buff.append(" ");
+ buff.append("Noise sample 3");
+ buff.append(" ");
+ buff.append("Noise sample 4");
+ buff.append(" ");
+ buff.append("Noise sample 5");
+ buff.append(" ");
+ buff.append("Gain");
buff.append(" ");
- buff.append("offset");
+ buff.append("Offset");
buff.append(" ");
- buff.append("amplitude");
+ buff.append("Amplitude");
buff.append(" ");
buff.append("t0");
buff.append(" ");
- buff.append("shift");
+ buff.append("tp");
buff.append(" ");
- buff.append("chisq");
- buff.append(" ");
- buff.append("bad");
+ buff.append("Bad Channels");
buff.append('\n');
for (int i = 0; i < 115; i++) {
buff.append("-");
@@ -143,20 +173,27 @@
// Get the conditions for the channel.
ChannelConstants constants = getChannelConstants(channel);
SvtGain gain = constants.getGain();
- SvtPulseParameters pulse = constants.getPulseParameters();
+ SvtShapeFitParameters shapeFit = constants.getShapeFitParameters();
SvtCalibration calibration = constants.getCalibration();
// Channel data.
- buff.append(String.format("%-6d %-5d %-8d %-8d ", channel.getChannelId(), channel.getFpga(), channel.getHybrid(), channel.getChannel()));
+ buff.append(String.format("%-6d %-5d %-8d %-8d ", channel.getChannelID(), channel.getFebID(), channel.getFebHybridID(), channel.getChannel()));
// Calibration.
- buff.append(String.format("%-9.4f %-11.4f ", calibration.getNoise(), calibration.getPedestal()));
+ for(int sample = 0; sample < MAX_NUMBER_OF_SAMPLES; sample++){
+ buff.append(calibration.getPedestal(sample));
+ buff.append(" ");
+ }
+ for(int sample = 0; sample < MAX_NUMBER_OF_SAMPLES; sample++){
+ buff.append(calibration.getNoise(sample));
+ buff.append(" ");
+ }
// Gain.
buff.append(String.format("%-6.4f %-9.4f ", gain.getGain(), gain.getOffset()));
// Pulse shape.
- buff.append(String.format("%-10.4f %-8.4f %-8.4f %-10.4f ", pulse.getAmplitude(), pulse.getT0(), pulse.getTimeShift(), pulse.getChisq()));
+ buff.append(String.format("%-10.4f %-8.4f %-8.4f", shapeFit.getAmplitude(), shapeFit.getT0(), shapeFit.getTp()));
// Bad channel.
buff.append(constants.isBadChannel());
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsConverter.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsConverter.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -8,71 +8,147 @@
import static org.hps.conditions.TableConstants.SVT_PULSE_PARAMETERS;
import static org.hps.conditions.TableConstants.SVT_TIME_SHIFTS;
+import org.hps.conditions.DatabaseConditionsManager;
+import org.hps.conditions.TableMetaData;
import org.hps.conditions.svt.SvtBadChannel.SvtBadChannelCollection;
import org.hps.conditions.svt.SvtCalibration.SvtCalibrationCollection;
import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
import org.hps.conditions.svt.SvtGain.SvtGainCollection;
-import org.hps.conditions.svt.SvtPulseParameters.SvtPulseParametersCollection;
-import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
+import org.hps.conditions.svt.SvtShapeFitParameters.SvtShapeFitParametersCollection;
+import org.hps.conditions.svt.SvtT0Shift.SvtT0ShiftCollection;
import org.lcsim.conditions.ConditionsConverter;
import org.lcsim.conditions.ConditionsManager;
/**
* This class creates an {@link SvtConditions} object from the database, based on the
* current run number known by the conditions manager.
+ *
+ * @author Jeremy McCormick <[log in to unmask]>
+ * @author Omar Moreno <[log in to unmask]>
*/
public final class SvtConditionsConverter implements ConditionsConverter<SvtConditions> {
+ private TableMetaData metaData = null;
+ private String tableName = null;
+
/**
* Create and return the SVT conditions object.
* @param manager The current conditions manager.
* @param name The conditions key, which is ignored for now.
*/
public SvtConditions getData(ConditionsManager manager, String name) {
-
- // Get the SVT channel map.
- SvtChannelCollection channels = manager.getCachedConditions(SvtChannelCollection.class, SVT_CHANNELS).getCachedData();
-
- // Create the conditions object.
+
+ DatabaseConditionsManager dbConditionsManager = (DatabaseConditionsManager) manager;
+
+ // Get the table name containing the SVT channel map from the
+ // database configuration. If it doesn't exist, use the default value.
+ metaData = dbConditionsManager.findTableMetaData(SvtChannelCollection.class);
+ if(metaData != null){
+ tableName = metaData.getTableName();
+ } else {
+ tableName = SVT_CHANNELS;
+ }
+ // Get the SVT channel map from the conditions database
+ SvtChannelCollection channels
+ = dbConditionsManager.getCachedConditions(SvtChannelCollection.class, tableName).getCachedData();
+
+ // Create the SVT conditions object to use to encapsulate SVT condition collections
SvtConditions conditions = new SvtConditions(channels);
- // Create the DAQ map.
- SvtDaqMappingCollection daqMap = manager.getCachedConditions(SvtDaqMappingCollection.class, SVT_DAQ_MAP).getCachedData();
+ // Get the table name containing the SVT DAQ map from the database
+ // configuration. If it doesn't exist, use the default value.
+ metaData = dbConditionsManager.findTableMetaData(SvtDaqMappingCollection.class);
+ if(metaData != null){
+ tableName = metaData.getTableName();
+ } else {
+ tableName = SVT_DAQ_MAP;
+ }
+ // Get the DAQ map from the conditions database
+ SvtDaqMappingCollection daqMap = manager.getCachedConditions(SvtDaqMappingCollection.class, tableName).getCachedData();
conditions.setDaqMap(daqMap);
- // Add calibrations by channel.
- SvtCalibrationCollection calibrations = manager.getCachedConditions(SvtCalibrationCollection.class, SVT_CALIBRATIONS).getCachedData();
+ // Get the table name containing the SVT calibrations (baseline, noise)
+ // from the database configuration. If it doesn't exist, use the
+ // default value.
+ metaData = dbConditionsManager.findTableMetaData(SvtCalibrationCollection.class);
+ if(metaData != null){
+ tableName = metaData.getTableName();
+ } else {
+ tableName = SVT_CALIBRATIONS;
+ }
+ // Get the calibrations from the conditions database
+ SvtCalibrationCollection calibrations = manager.getCachedConditions(SvtCalibrationCollection.class, tableName).getCachedData();
for (SvtCalibration calibration : calibrations.getObjects()) {
- SvtChannel channel = conditions.getChannelMap().findChannel(calibration.getChannelId());
+ SvtChannel channel = conditions.getChannelMap().findChannel(calibration.getChannelID());
conditions.getChannelConstants(channel).setCalibration(calibration);
}
+ // Get the table name containing the SVT pulse shape parameters from
+ // the database configuration. If it doesn't exist, use the default value.
+ metaData = dbConditionsManager.findTableMetaData(SvtShapeFitParametersCollection.class);
+ if(metaData != null){
+ tableName = metaData.getTableName();
+ } else {
+ tableName = SVT_PULSE_PARAMETERS;
+ }
// Add pulse parameters by channel.
- SvtPulseParametersCollection pulseParametersCollection = manager.getCachedConditions(SvtPulseParametersCollection.class, SVT_PULSE_PARAMETERS).getCachedData();
- for (SvtPulseParameters pulseParameters : pulseParametersCollection.getObjects()) {
- SvtChannel channel = conditions.getChannelMap().findChannel(pulseParameters.getChannelId());
- conditions.getChannelConstants(channel).setPulseParameters(pulseParameters);
+ SvtShapeFitParametersCollection shapeFitParametersCollection = manager.getCachedConditions(SvtShapeFitParametersCollection.class, tableName).getCachedData();
+ for (SvtShapeFitParameters shapeFitParameters : shapeFitParametersCollection.getObjects()) {
+ SvtChannel channel = conditions.getChannelMap().findChannel(shapeFitParameters.getChannelID());
+ conditions.getChannelConstants(channel).setShapeFitParameters(shapeFitParameters);
}
+ // Get the table name containing the SVT bad channel map from the
+ // database configuration. If it doesn't exist, use the default value.
+ metaData = dbConditionsManager.findTableMetaData(SvtBadChannelCollection.class);
+ if(metaData != null){
+ tableName = metaData.getTableName();
+ } else {
+            tableName = SVT_BAD_CHANNELS;
+        }
+
// Add bad channels.
- SvtBadChannelCollection badChannels = manager.getCachedConditions(SvtBadChannelCollection.class, SVT_BAD_CHANNELS).getCachedData();
- for (SvtBadChannel badChannel : badChannels.getObjects()) {
- SvtChannel channel = conditions.getChannelMap().findChannel(badChannel.getChannelId());
- conditions.getChannelConstants(channel).setBadChannel(true);
+ // FIXME: This should be changed to catch a conditions record not found exception instead of
+ // a runtime exception.
+ try {
+ SvtBadChannelCollection badChannels = manager.getCachedConditions(SvtBadChannelCollection.class, tableName).getCachedData();
+ for (SvtBadChannel badChannel : badChannels.getObjects()) {
+ SvtChannel channel = conditions.getChannelMap().findChannel(badChannel.getChannelId());
+ conditions.getChannelConstants(channel).setBadChannel(true);
+ }
+ } catch(RuntimeException e){
+ e.printStackTrace();
}
+ // Get the table name containing the SVT gains from the database
+ // configuration. If it doesn't exist, use the default value.
+ metaData = dbConditionsManager.findTableMetaData(SvtGainCollection.class);
+ if(metaData != null){
+ tableName = metaData.getTableName();
+ } else {
+ tableName = SVT_GAINS;
+ }
+
// Add gains by channel.
- SvtGainCollection gains = manager.getCachedConditions(SvtGainCollection.class, SVT_GAINS).getCachedData();
+ SvtGainCollection gains = manager.getCachedConditions(SvtGainCollection.class, tableName).getCachedData();
for (SvtGain object : gains.getObjects()) {
int channelId = object.getChannelID();
SvtChannel channel = conditions.getChannelMap().findChannel(channelId);
conditions.getChannelConstants(channel).setGain(object);
}
- // Set the time shifts by sensor.
- SvtTimeShiftCollection timeShifts = manager.getCachedConditions(SvtTimeShiftCollection.class, SVT_TIME_SHIFTS).getCachedData();
- conditions.setTimeShifts(timeShifts);
+ // Get the table name containing the SVT t0 shifts. If it doesn't
+ // exist, use the default value.
+ metaData = dbConditionsManager.findTableMetaData(SvtT0ShiftCollection.class);
+ if(metaData != null){
+ tableName = metaData.getTableName();
+ } else {
+ tableName = SVT_TIME_SHIFTS;
+ }
+ // Set the t0 shifts by sensor.
+ SvtT0ShiftCollection t0Shifts = manager.getCachedConditions(SvtT0ShiftCollection.class, tableName).getCachedData();
+ conditions.setTimeShifts(t0Shifts);
return conditions;
}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConditionsLoader.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,87 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.util.Collection;
-import java.util.List;
-
-import org.hps.conditions.svt.SvtChannel.SvtChannelCollection;
-import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
-import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
-import org.lcsim.detector.tracker.silicon.HpsSiSensor;
-import org.lcsim.geometry.Detector;
-import org.hps.util.Pair;
-
-/**
- * This class loads {@link SvtConditions} data onto <code>HpsSiSensor</code> objects.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public final class SvtConditionsLoader {
-
- /**
- * Load conditions data onto a detector object. This method is analogous to
- * {@link org.lcsim.hps.recon.tracking.SvtUtils#setup(Detector)}.
- * @param detector The detector object.
- * @param conditions The conditions object.
- */
- // FIXME: Change this to use a Subdetector instead of the Detector.
- public void load(Detector detector, SvtConditions conditions) {
-
- // Find sensor objects.
- List<HpsSiSensor> sensors = detector.getDetectorElement().findDescendants(HpsSiSensor.class);
- SvtChannelCollection channelMap = conditions.getChannelMap();
- SvtDaqMappingCollection daqMap = conditions.getDaqMap();
- SvtTimeShiftCollection timeShifts = conditions.getTimeShifts();
-
- // Loop over sensors.
- for (HpsSiSensor sensor : sensors) {
-
- // Reset possible existing conditions data on sensor.
- sensor.reset();
-
- // Get the layer number.
- int layerNumber = sensor.getLayerNumber();
-
- // Get info from the DAQ map about this sensor.
- Pair<Integer, Integer> daqPair = null;
- int half = SvtDaqMappingCollection.TOP_HALF;
- if (sensor.isBottomLayer()) {
- half = SvtDaqMappingCollection.BOTTOM_HALF;
- }
- daqPair = daqMap.get(half, layerNumber);
- if (daqPair == null) {
- throw new RuntimeException("Failed to find DAQ pair for sensor: " + sensor.getName());
- }
-
- // Set FPGA value from DAQ map.
- sensor.setFpgaNumber(daqPair.getFirstElement());
-
- // Set hybrid value from DAQ map.
- sensor.setHybridNumber(daqPair.getSecondElement());
-
- // Find all the channels for this sensor.
- Collection<SvtChannel> channels = channelMap.find(daqPair);
-
- // Loop over the channels of the sensor.
- for (SvtChannel channel : channels) {
- // Get conditions data for this channel.
- ChannelConstants constants = conditions.getChannelConstants(channel);
- int channelNumber = channel.getChannel();
-
- //
- // Set conditions data for this channel on the sensor object:
- //
- if (constants.isBadChannel()) {
- sensor.setBadChannel(channelNumber);
- }
- sensor.setGain(channelNumber, constants.getGain().getGain());
- sensor.setTimeOffset(channelNumber, constants.getGain().getOffset());
- sensor.setNoise(channelNumber, constants.getCalibration().getNoise());
- sensor.setPedestal(channelNumber, constants.getCalibration().getPedestal());
- sensor.setPulseParameters(channelNumber, constants.getPulseParameters().toArray());
- }
-
- // Set the time shift for the sensor.
- SvtTimeShift sensorTimeShift = timeShifts.find(daqPair).get(0);
- sensor.setTimeShift(sensorTimeShift.getTimeShift());
- }
- }
-}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConfigurationLoader.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConfigurationLoader.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -9,7 +9,7 @@
import org.hps.conditions.DatabaseConditionsManager;
import org.hps.conditions.TableConstants;
-import org.hps.conditions.config.ConditionsDatabaseConfiguration;
+import org.hps.conditions.config.ResourceConfiguration;
/**
* Load an SVT configuration XML file into the conditions database from a file.
@@ -28,8 +28,8 @@
DatabaseConditionsManager manager;
public SvtConfigurationLoader() {
- // FIXME: Configuration hard-coded to conditions dev database.
- new ConditionsDatabaseConfiguration(
+ // FIXME: Configuration hard-coded here.
+ new ResourceConfiguration(
"/org/hps/conditions/config/conditions_dev.xml",
"/org/hps/conditions/config/conditions_dev_local.properties").setup();
manager = DatabaseConditionsManager.getInstance();
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConverterRegistry.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtConverterRegistry.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -8,8 +8,8 @@
import org.hps.conditions.svt.SvtConfiguration.SvtConfigurationCollection;
import org.hps.conditions.svt.SvtDaqMapping.SvtDaqMappingCollection;
import org.hps.conditions.svt.SvtGain.SvtGainCollection;
-import org.hps.conditions.svt.SvtPulseParameters.SvtPulseParametersCollection;
-import org.hps.conditions.svt.SvtTimeShift.SvtTimeShiftCollection;
+import org.hps.conditions.svt.SvtShapeFitParameters.SvtShapeFitParametersCollection;
+import org.hps.conditions.svt.SvtT0Shift.SvtT0ShiftCollection;
/**
* Definitions of converters from the database to SVT specific conditions classes.
@@ -60,15 +60,15 @@
}
}
- public static class SvtPulseParametersConverter extends ConditionsObjectConverter<SvtPulseParametersCollection> {
+ public static class SvtShapeFitParametersConverter extends ConditionsObjectConverter<SvtShapeFitParametersCollection> {
public Class getType() {
- return SvtPulseParametersCollection.class;
+ return SvtShapeFitParametersCollection.class;
}
}
- public static class SvtTimeShiftConverter extends ConditionsObjectConverter<SvtTimeShiftCollection> {
+ public static class SvtT0ShiftConverter extends ConditionsObjectConverter<SvtT0ShiftCollection> {
public Class getType() {
- return SvtTimeShiftCollection.class;
+ return SvtT0ShiftCollection.class;
}
}
}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -4,6 +4,12 @@
import org.hps.conditions.ConditionsObjectCollection;
import org.hps.util.Pair;
+/**
+ * This class encapsulates the SVT DAQ map.
+ *
+ * @author Jeremy McCormick <[log in to unmask]>
+ * @author Omar Moreno <[log in to unmask]>
+ */
public final class SvtDaqMapping extends AbstractConditionsObject {
public static class SvtDaqMappingCollection extends ConditionsObjectCollection<SvtDaqMapping> {
@@ -11,23 +17,87 @@
/**
* Flag values for top or bottom half.
*/
- public static final int TOP_HALF = 0;
- public static final int BOTTOM_HALF = 1;
+ public static final String TOP_HALF = "T";
+ public static final String BOTTOM_HALF = "B";
+
+ /**
+ * Flag values for axial or stereo sensors
+ */
+ public static final String AXIAL = "A";
+ public static final String STEREO = "S";
/**
- * Get a DAQ pair (FPGA, hybrid) by top/bottom number and layer number.
- * @param half Value indicating top or bottom half of detector.
- * @param layerNumber The layer number.
+ * Get a DAQ pair (FEB ID, FEB Hybrid ID) by SVT volume, layer number
+ * and module number.
+ *
+ * @param svtHalf Value indicating top or bottom half of detector
+ * @param layerNumber The layer number
+ * @param moduleNumber The module number (needed to identify layer's 4-6)
* @return The DAQ pair for the half and layer number or null if does not exist.
*/
- Pair<Integer, Integer> get(int half, int layerNumber) {
- for (SvtDaqMapping object : this.getObjects()) {
- if (object.getHalf() == half && object.getLayerNumber() == layerNumber) {
- return new Pair<Integer, Integer>(object.getFpgaNumber(), object.getHybridNumber());
- }
+ Pair<Integer, Integer> getDaqPair(String SvtHalf, int layerNumber, int moduleNumber) {
+
+ for (SvtDaqMapping object : this.getObjects()) {
+
+ if (SvtHalf.equals(object.getSvtHalf()) && object.getLayerNumber() == layerNumber) {
+
+ // If the sensor belongs to the first three layers of the SVT
+ // and the detector layer and SVT half match, no further searching
+ // is required.
+ if(layerNumber <= 6){
+ return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
+ }
+
+ // If the sensor belongs to layers 4-6, then find the matching
+ // DAQ pair by looking at combinations of FEB hybrid ID's and module
+ // numbers. At the moment, it is assumed that odd SVT layers will
+ // be connected to even FEB hybrid channels and even SVT layers to odd
+ // FEB hybrid channels.
+ // TODO: Changes should be made to HpsSiSensor that will allow this
+ // portion of the matching to be greatly simplified.
+ if(SvtHalf.equals(TOP_HALF)){
+ if(layerNumber%2 != 0
+ && ((object.getFebHybridID() == 0 && moduleNumber == 0)
+ || object.getFebHybridID() == 2 && moduleNumber == 2)){
+ return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
+ } else if(layerNumber %2 == 0 &&((object.getFebHybridID() == 1 && moduleNumber == 0)
+ || object.getFebHybridID() == 3 && moduleNumber == 2)) {
+ return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
+ }
+ } else if(SvtHalf.equals(BOTTOM_HALF)){
+ if(layerNumber%2 != 0
+ && ((object.getFebHybridID() == 0 && moduleNumber == 1)
+ || object.getFebHybridID() == 2 && moduleNumber == 3)){
+ return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
+ } else if(layerNumber %2 == 0 &&((object.getFebHybridID() == 1 && moduleNumber == 1)
+ || object.getFebHybridID() == 3 && moduleNumber == 3)) {
+ return new Pair<Integer, Integer>(object.getFebID(), object.getFebHybridID());
+ }
+ }
+ }
}
return null;
}
+
+ /**
+ * Get the orientation of a sensor using the FEB ID and FEB Hybrid ID.
+ * If the FEB ID and FEB Hybrid ID combination is not found, return null.
+ *
+ * @param daqPair (Pair<FEB ID, FEB Hybrid ID>) for a given sensor
+ * @return If a daqPair is found, return an "A" if the sensor
+ * orientation is Axial, an "S" if the orientation is Stereo or
+ * null if the daqPair doesn't exist.
+ */
+ public String getOrientation(Pair<Integer, Integer> daqPair){
+
+ for(SvtDaqMapping daqMapping : this.getObjects()){
+
+ if(daqPair.getFirstElement() == daqMapping.getFebID() && daqPair.getSecondElement() == daqMapping.getFebHybridID()){
+ return daqMapping.getOrientation();
+ }
+ }
+ return null;
+ }
/**
* Convert this object to a string.
@@ -35,43 +105,61 @@
*/
public String toString() {
StringBuffer buff = new StringBuffer();
- buff.append("half");
+ buff.append("FEB ID: ");
buff.append(" ");
- buff.append("layer");
+ buff.append("FEB Hybrid ID: ");
buff.append(" ");
- buff.append("fpga");
+ buff.append("Hybrid ID: ");
buff.append(" ");
- buff.append("hybrid");
+ buff.append("SVT half: ");
+ buff.append(" ");
+ buff.append("Layer");
+ buff.append(" ");
+ buff.append("Orientation: ");
+ buff.append(" ");
buff.append('\n');
buff.append("----------------------");
buff.append('\n');
for (SvtDaqMapping object : getObjects()) {
- buff.append(object.getHalf());
+ buff.append(object.getFebID());
buff.append(" ");
+ buff.append(object.getFebHybridID());
+ buff.append(" ");
+ buff.append(object.getHybridID());
+ buff.append(" ");
+ buff.append(object.getSvtHalf());
+ buff.append(" ");
buff.append(String.format("%-2d", object.getLayerNumber()));
buff.append(" ");
- buff.append(object.getFpgaNumber());
+ buff.append(object.getOrientation());
buff.append(" ");
- buff.append(object.getHybridNumber());
buff.append('\n');
}
return buff.toString();
}
}
-
- public int getHalf() {
- return getFieldValue("half");
+
+ public int getFebID() {
+ return getFieldValue("feb_id");
}
+
+ public int getFebHybridID() {
+ return getFieldValue("feb_hybrid_id");
+ }
+
+ public int getHybridID() {
+ return getFieldValue("hybrid_id");
+ }
+
+ public String getSvtHalf() {
+ return getFieldValue("svt_half");
+ }
public int getLayerNumber() {
return getFieldValue("layer");
}
- public int getFpgaNumber() {
- return getFieldValue("fpga");
+ public String getOrientation() {
+ return getFieldValue("orientation");
}
-
- public int getHybridNumber() {
- return getFieldValue("hybrid");
- }
}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParameters.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtPulseParameters.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,75 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.AbstractConditionsObject;
-import org.hps.conditions.ConditionsObjectCollection;
-
-/**
- * This class represents the pulse parameters for an SVT channel.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public final class SvtPulseParameters extends AbstractConditionsObject {
-
- public static class SvtPulseParametersCollection extends ConditionsObjectCollection<SvtPulseParameters> {
- }
-
- /**
- * Get the SVT channel ID.
- * @return The SVT channel ID.
- */
- int getChannelId() {
- return getFieldValue(Integer.class, "svt_channel_id");
- }
-
- /**
- * Get the amplitude.
- * @return The amplifude.
- */
- double getAmplitude() {
- return getFieldValue(Double.class, "amplitude");
- }
-
- /**
- * Get the starting time.
- * @return The starting time.
- */
- double getT0() {
- return getFieldValue(Double.class, "t0");
- }
-
- /**
- * Get the time shift.
- * @return The time shift.
- */
- double getTimeShift() {
- return getFieldValue(Double.class, "tp");
- }
-
- /**
- * Get the chisq.
- * @return The chisq.
- */
- double getChisq() {
- return getFieldValue(Double.class, "chisq");
- }
-
- /**
- * Convert this object to a human readable string.
- * @return This object converted to a string.
- */
- public String toString() {
- return "amp: " + getAmplitude() + ", t0: " + getT0() + ", shift: " + getTimeShift() + ", chisq: " + getChisq();
- }
-
- /**
- * Convert this object to an array of doubles.
- * @return This object converted to an array of doubles.
- */
- public double[] toArray() {
- double[] values = new double[4];
- values[0] = getAmplitude();
- values[1] = getT0();
- values[2] = getTimeShift();
- values[3] = getChisq();
- return values;
- }
-}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShift.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/java/org/hps/conditions/svt/SvtTimeShift.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,57 +0,0 @@
-package org.hps.conditions.svt;
-
-import org.hps.conditions.AbstractConditionsObject;
-import org.hps.conditions.ConditionsObjectCollection;
-import org.hps.conditions.ConditionsObjectException;
-import org.hps.util.Pair;
-
-/**
- * This class is a data holder for associating a time shift with a specific sensor by FPGA
- * and hybrid numbers.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public final class SvtTimeShift extends AbstractConditionsObject {
-
- public static class SvtTimeShiftCollection extends ConditionsObjectCollection<SvtTimeShift> {
-
- SvtTimeShiftCollection find(Pair<Integer, Integer> pair) {
- SvtTimeShiftCollection timeShifts = new SvtTimeShiftCollection();
- int fpga = pair.getFirstElement();
- int hybrid = pair.getSecondElement();
- for (SvtTimeShift timeShift : getObjects()) {
- if (timeShift.getFpga() == fpga && timeShift.getHybrid() == hybrid) {
- try {
- timeShifts.add(timeShift);
- } catch (ConditionsObjectException e) {
- throw new RuntimeException(e);
- }
- }
- }
- return timeShifts;
- }
- }
-
- /**
- * Get the FPGA number.
- * @return The FPGA number.
- */
- int getFpga() {
- return getFieldValue("fpga");
- }
-
- /**
- * Get the hybrid number.
- * @return The hybrid number.
- */
- int getHybrid() {
- return getFieldValue("hybrid");
- }
-
- /**
- * Get the time shift.
- * @return The time shift.
- */
- double getTimeShift() {
- return getFieldValue("time_shift");
- }
-}
java/branches/hps-java_HPSJAVA-88/conditions/src/main/resources/org/hps/conditions/config
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/resources/org/hps/conditions/config/conditions_database_testrun_2012.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/resources/org/hps/conditions/config/conditions_database_testrun_2012.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -6,14 +6,18 @@
<converter class="org.hps.conditions.ConditionsRecordConverter"/>
<!-- SVT converters -->
+ <!--
<converter class="org.hps.conditions.svt.SvtConditionsConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtBadChannelConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtCalibrationConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtChannelConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtDaqMappingConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtGainConverter"/>
- <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtPulseParametersConverter"/>
+ <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtPulseParametersConverter"/>
+ <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtShapeFitParametersConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtTimeShiftConverter"/>
+ <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtT0ShiftConverter"/>
+ -->
<!-- ECal converters -->
<converter class="org.hps.conditions.ecal.EcalConditionsConverter"/>
@@ -49,7 +53,8 @@
<field name="collection_id" />
</fields>
</table>
-
+
+<!--
<table key="svt_channels" name="svt_channels">
<classes>
<object class="org.hps.conditions.svt.SvtChannel"/>
@@ -73,10 +78,25 @@
<field name="gain" />
<field name="offset" />
</fields>
- </table>
+ </table>
<table key="svt_pulse_parameters" name="svt_pulse_parameters">
<classes>
+ <object class="org.hps.conditions.svt.SvtShapeFitParameters"/>
+ <collection class="org.hps.conditions.svt.SvtShapeFitParameters$SvtShapeFitParametersCollection"/>
+ </classes>
+ <fields>
+ <field name="svt_channel_id" />
+ <field name="amplitude" />
+ <field name="t0" />
+ <field name="tp" />
+ </fields>
+ </table>
+-->
+
+<!--
+ <table key="svt_pulse_parameters" name="svt_pulse_parameters">
+ <classes>
<object class="org.hps.conditions.svt.SvtPulseParameters"/>
<collection class="org.hps.conditions.svt.SvtPulseParameters$SvtPulseParametersCollection"/>
</classes>
@@ -88,7 +108,9 @@
<field name="chisq" />
</fields>
</table>
+-->
+<!--
<table key="svt_calibrations" name="svt_calibrations">
<classes>
<object class="org.hps.conditions.svt.SvtCalibration"/>
@@ -100,6 +122,20 @@
<field name="pedestal" />
</fields>
</table>
+-->
+
+<!--
+ <table key="svt_t0_shifts" name="svt_t0_shifts">
+ <classes>
+ <object class="org.hps.conditions.svt.SvtT0Shift"/>
+ <collection class="org.hps.conditions.svt.SvtT0Shift$SvtT0ShiftCollection"/>
+ </classes>
+ <fields>
+ <field name="feb_id" />
+ <field name="feb_hybrid_id" />
+ <field name="t0_shift" />
+ </fields>
+ </table>
<table key="svt_time_shifts" name="svt_time_shifts">
<classes>
@@ -112,7 +148,9 @@
<field name="time_shift" />
</fields>
</table>
-
+-->
+
+<!--
<table key="svt_bad_channels" name="svt_bad_channels">
<classes>
<object class="org.hps.conditions.svt.SvtBadChannel"/>
@@ -135,6 +173,7 @@
<field name="hybrid" />
</fields>
</table>
+-->
<table key="ecal_bad_channels" name="ecal_bad_channels">
<classes>
java/branches/hps-java_HPSJAVA-88/conditions/src/main/resources/org/hps/conditions/config
--- java/branches/hps-java_HPSJAVA-88/conditions/src/main/resources/org/hps/conditions/config/conditions_dev.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/main/resources/org/hps/conditions/config/conditions_dev.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -12,10 +12,10 @@
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtCalibrationConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtConfigurationConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtChannelConverter"/>
- <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtDaqMappingConverter"/>
+ <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtDaqMappingConverter"/>
<converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtGainConverter"/>
- <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtPulseParametersConverter"/>
- <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtTimeShiftConverter"/>
+ <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtShapeFitParametersConverter"/>
+ <converter class="org.hps.conditions.svt.SvtConverterRegistry$SvtT0ShiftConverter"/>
<!-- ECal converters -->
<converter class="org.hps.conditions.ecal.EcalConditionsConverter"/>
@@ -53,6 +53,10 @@
<field name="collection_id" />
</fields>
</table>
+
+ <!--
+ SVT Tables
+ -->
<table key="svt_alignments" name="svt_alignments">
<classes>
@@ -72,8 +76,8 @@
</classes>
<fields>
<field name="channel_id" />
- <field name="fpga" />
- <field name="hybrid" />
+ <field name="feb_id" />
+ <field name="feb_hybrid_id" />
<field name="channel" />
</fields>
</table>
@@ -102,17 +106,16 @@
</fields>
</table>
- <table key="svt_pulse_parameters" name="svt_pulse_parameters">
+ <table key="svt_shape_fit_parameters" name="svt_shape_fit_parameters">
<classes>
- <object class="org.hps.conditions.svt.SvtPulseParameters"/>
- <collection class="org.hps.conditions.svt.SvtPulseParameters$SvtPulseParametersCollection"/>
+ <object class="org.hps.conditions.svt.SvtShapeFitParameters"/>
+ <collection class="org.hps.conditions.svt.SvtShapeFitParameters$SvtShapeFitParametersCollection"/>
</classes>
<fields>
<field name="svt_channel_id" />
<field name="amplitude" />
<field name="t0" />
<field name="tp" />
- <field name="chisq" />
</fields>
</table>
@@ -123,20 +126,30 @@
</classes>
<fields>
<field name="svt_channel_id" />
- <field name="noise" />
- <field name="pedestal" />
+ <field name="pedestal_0" />
+ <field name="pedestal_1" />
+ <field name="pedestal_2" />
+ <field name="pedestal_3" />
+ <field name="pedestal_4" />
+ <field name="pedestal_5" />
+ <field name="noise_0" />
+ <field name="noise_1" />
+ <field name="noise_2" />
+ <field name="noise_3" />
+ <field name="noise_4" />
+ <field name="noise_5" />
</fields>
</table>
- <table key="svt_time_shifts" name="svt_time_shifts">
+ <table key="svt_t0_shifts" name="svt_t0_shifts">
<classes>
- <object class="org.hps.conditions.svt.SvtTimeShift"/>
- <collection class="org.hps.conditions.svt.SvtTimeShift$SvtTimeShiftCollection"/>
+ <object class="org.hps.conditions.svt.SvtT0Shift"/>
+ <collection class="org.hps.conditions.svt.SvtT0Shift$SvtT0ShiftCollection"/>
</classes>
<fields>
- <field name="fpga" />
- <field name="hybrid" />
- <field name="time_shift" />
+ <field name="feb_id" />
+ <field name="feb_hybrid_id" />
+ <field name="t0_shift" />
</fields>
</table>
@@ -147,22 +160,29 @@
</classes>
<fields>
<field name="svt_channel_id" />
+ <field name="notes" />
</fields>
</table>
- <table key="svt_daq_map" name="svt_daq_map">
- <classes>
- <object class="org.hps.conditions.svt.SvtDaqMapping"/>
- <collection class="org.hps.conditions.svt.SvtDaqMapping$SvtDaqMappingCollection"/>
+ <table key="svt_daq_map" name="svt_daq_map">
+ <classes>
+ <object class="org.hps.conditions.svt.SvtDaqMapping"/>
+ <collection class="org.hps.conditions.svt.SvtDaqMapping$SvtDaqMappingCollection"/>
</classes>
<fields>
- <field name="half" />
+ <field name="feb_id" />
+ <field name="feb_hybrid_id" />
+ <field name="hybrid_id" />
+ <field name="svt_half" />
<field name="layer" />
- <field name="fpga" />
- <field name="hybrid" />
+ <field name="orientation" />
</fields>
</table>
-
+
+ <!--
+ ECal Tables
+ -->
+
<table key="ecal_bad_channels" name="ecal_bad_channels">
<classes>
<object class="org.hps.conditions.ecal.EcalBadChannel"/>
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsDevTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsDevTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -5,50 +5,37 @@
import org.lcsim.conditions.ConditionsManager.ConditionsNotFoundException;
/**
- * Read conditions from the dev database.
+ * Read conditions from the dev database and print them out.
* @author Jeremy McCormick <[log in to unmask]>
*/
public class ConditionsDevTest extends TestCase {
-
- static final String[] conditionsKeys = {
- TableConstants.ECAL_CALIBRATIONS,
- TableConstants.ECAL_CHANNELS,
- TableConstants.ECAL_GAINS,
- TableConstants.ECAL_LEDS,
- TableConstants.ECAL_TIME_SHIFTS,
- TableConstants.SVT_ALIGNMENTS,
- TableConstants.SVT_CALIBRATIONS,
- TableConstants.SVT_CHANNELS,
- TableConstants.SVT_DAQ_MAP,
- TableConstants.SVT_GAINS,
- TableConstants.SVT_PULSE_PARAMETERS,
- TableConstants.SVT_TIME_SHIFTS
- };
+ static String config = "/org/hps/conditions/config/conditions_dev.xml";
+ static String prop = "/org/hps/conditions/config/conditions_dev.properties";
+
public void testConditionsDev() {
DatabaseConditionsManager manager = new DatabaseConditionsManager();
- manager.configure("/org/hps/conditions/config/conditions_dev.xml");
- manager.setConnectionResource("/org/hps/conditions/config/conditions_dev.properties");
+ manager.configure(config);
+ manager.setConnectionResource(prop);
manager.register();
try {
manager.setDetector("HPS-Proposal2014-v8-6pt6", 0);
} catch (ConditionsNotFoundException e) {
throw new RuntimeException(e);
}
-
- for (String conditionsKey : conditionsKeys) {
- TableMetaData metaData = manager.findTableMetaData(conditionsKey);
- ConditionsSeries series = manager.getConditionsSeries(metaData.getKey());
- for (int i = 0; i < series.getNumberOfCollections(); i++) {
- ConditionsObjectCollection<AbstractConditionsObject> collection = series.getCollection(i);
- System.out.println("Printing " + collection.getObjects().size()
- + " objects in collection " + metaData.getKey() + " ...");
- for (ConditionsObject object : collection.getObjects()) {
- System.out.println(object.toString());
+
+ for (TableMetaData metaData : manager.getTableMetaDataList()) {
+ ConditionsSeries series = manager.getConditionsSeries(metaData.getKey());
+ if (series.getNumberOfCollections() > 0) {
+ for (int i = 0; i < series.getNumberOfCollections(); i++) {
+ ConditionsObjectCollection<AbstractConditionsObject> collection = series.getCollection(i);
+ System.out.println("Printing " + collection.getObjects().size() + " objects in collection " + metaData.getKey() + " ...");
+ for (ConditionsObject object : collection.getObjects()) {
+ System.out.println(object.toString());
+ }
}
- }
+ }
}
}
-
}
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsDriverTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsDriverTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -42,7 +42,9 @@
// Configure the loop.
loop.setLCIORecordSource(testFile);
- loop.add(new ConditionsDriver());
+ ConditionsDriver conditionsDriver = new ConditionsDriver();
+ conditionsDriver.setLoadSvtConditions(false);
+ loop.add(conditionsDriver);
RunNumberDriver runNumberDriver = new RunNumberDriver();
loop.add(runNumberDriver);
@@ -80,31 +82,31 @@
*/
static class RunNumberDriver extends Driver {
- int _currentRun = -1;
- int _nruns = 0;
- List<Integer> _runsProcessed = new ArrayList<Integer>();
- Set<Integer> _uniqueRuns = new LinkedHashSet<Integer>();
+ int currentRun = -1;
+ int nruns = 0;
+ List<Integer> runsProcessed = new ArrayList<Integer>();
+ Set<Integer> uniqueRuns = new LinkedHashSet<Integer>();
public void process(EventHeader event) {
int runNumber = event.getRunNumber();
- if (runNumber != _currentRun) {
- _currentRun = runNumber;
- _uniqueRuns.add(_currentRun);
- _runsProcessed.add(_currentRun);
- _nruns++;
+ if (runNumber != currentRun) {
+ currentRun = runNumber;
+ uniqueRuns.add(currentRun);
+ runsProcessed.add(currentRun);
+ nruns++;
}
}
int getNumberOfRuns() {
- return _nruns;
+ return nruns;
}
List<Integer> getRunsProcessed() {
- return _runsProcessed;
+ return runsProcessed;
}
Set<Integer> getUniqueRuns() {
- return _uniqueRuns;
+ return uniqueRuns;
}
}
}
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsObjectTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsObjectTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -2,7 +2,7 @@
import junit.framework.TestCase;
-import org.hps.conditions.config.DefaultTestSetup;
+import org.hps.conditions.config.TestRunReadOnlyConfiguration;
import org.hps.conditions.svt.SvtGain;
import org.hps.conditions.svt.SvtGain.SvtGainCollection;
@@ -19,7 +19,8 @@
DatabaseConditionsManager conditionsManager;
public void setUp() {
- conditionsManager = new DefaultTestSetup().configure().setup();
+ new TestRunReadOnlyConfiguration(true);
+ conditionsManager = DatabaseConditionsManager.getInstance();
}
public void testBasicOperations() throws ConditionsObjectException {
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsSeriesConverterTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ConditionsSeriesConverterTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -2,7 +2,7 @@
import junit.framework.TestCase;
-import org.hps.conditions.config.DefaultTestSetup;
+import org.hps.conditions.config.TestRunReadOnlyConfiguration;
import org.hps.conditions.svt.SvtBadChannel;
import org.hps.conditions.svt.SvtBadChannel.SvtBadChannelCollection;
@@ -12,7 +12,8 @@
DatabaseConditionsManager conditionsManager;
public void setUp() {
- conditionsManager = new DefaultTestSetup().configure().setup();
+ new TestRunReadOnlyConfiguration(true);
+ conditionsManager = DatabaseConditionsManager.getInstance();
}
public void testConditionsSeries() {
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/DatabaseConditionsManagerTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/DatabaseConditionsManagerTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,24 +1,25 @@
package org.hps.conditions;
-import org.hps.conditions.config.DefaultTestSetup;
-
import junit.framework.TestCase;
+import org.hps.conditions.config.TestRunReadOnlyConfiguration;
+
public class DatabaseConditionsManagerTest extends TestCase {
- DatabaseConditionsManager _conditionsManager;
+ DatabaseConditionsManager conditionsManager;
public void setUp() {
- _conditionsManager = new DefaultTestSetup().configure().setup();
+ new TestRunReadOnlyConfiguration(true);
+ conditionsManager = DatabaseConditionsManager.getInstance();
}
@SuppressWarnings("rawtypes")
public void testLoad() {
// Load data from every table registered with the manager.
- for (TableMetaData metaData : _conditionsManager.getTableMetaDataList()) {
+ for (TableMetaData metaData : conditionsManager.getTableMetaDataList()) {
System.out.println(">>>> loading conditions from table: " + metaData.getKey());
- ConditionsObjectCollection conditionsObjects = _conditionsManager.getConditionsData(metaData.getCollectionClass(), metaData.getKey());
+ ConditionsObjectCollection conditionsObjects = conditionsManager.getConditionsData(metaData.getCollectionClass(), metaData.getKey());
System.out.println(" " + conditionsObjects.getObjects().size() + " " + conditionsObjects.get(0).getClass().getSimpleName() + " objects were created.");
}
}
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/beam
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/beam/BeamCurrentTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/beam/BeamCurrentTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -57,7 +57,9 @@
// Configure and run the loop.
loop.setLCIORecordSource(testFile);
- loop.add(new ConditionsDriver());
+ ConditionsDriver conditionsDriver = new ConditionsDriver();
+ conditionsDriver.setLoadSvtConditions(false);
+ loop.add(conditionsDriver);
loop.add(new BeamCurrentChecker());
loop.loop(-1, null);
}
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsConverterTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsConverterTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -3,7 +3,7 @@
import junit.framework.TestCase;
import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.config.DefaultTestSetup;
+import org.hps.conditions.config.TestRunReadOnlyConfiguration;
/**
* Tests that a {@link EcalConditions} objects loads without errors.
@@ -11,14 +11,14 @@
*/
public class EcalConditionsConverterTest extends TestCase {
+ DatabaseConditionsManager conditionsManager;
+
public void setUp() {
- new DefaultTestSetup().configure().setup();
+ new TestRunReadOnlyConfiguration(true);
+ conditionsManager = DatabaseConditionsManager.getInstance();
}
- public void test() {
-
- DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
-
+ public void test() {
// Test that the manager gets ECAL conditions.
EcalConditions conditions = conditionsManager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();
assertNotNull(conditions);
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsLoaderTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalConditionsLoaderTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,105 +0,0 @@
-package org.hps.conditions.ecal;
-
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.config.DefaultTestSetup;
-import org.lcsim.detector.converter.compact.EcalCrystal;
-import org.lcsim.geometry.Detector;
-
-/**
- * This test loads ECal conditions data onto the detector and checks some of the results
- * for basic validity.
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class EcalConditionsLoaderTest extends TestCase {
-
- /** Expected number of crystals. */
- private static final int CRYSTAL_COUNT_ANSWER = 442;
-
- /** Expected number of bad channels. */
- private static final int BAD_CHANNELS_ANSWER = 44;
-
- /** Valid minimum and maximum values for DAQ setup parameters. */
- private static final int MIN_CRATE_ANSWER = 1;
- private static final int MAX_CRATE_ANSWER = 2;
- private static final int MIN_SLOT_ANSWER = 3;
- private static final int MAX_SLOT_ANSWER = 19;
- private static final int MIN_CHANNEL_ANSWER = 0;
- private static final int MAX_CHANNEL_ANSWER = 19;
-
- // The total number of crystals that should be processed.
- private static final int CRYSTAL_COUNT = 442;
-
- public void setUp() {
- new DefaultTestSetup().configure().setup();
- }
-
- /**
- * Load SVT conditions data onto the detector and perform basic checks afterwards.
- */
- public void testLoad() {
-
- DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
-
- // Get the detector.
- Detector detector = conditionsManager.getCachedConditions(Detector.class, "compact.xml").getCachedData();
-
- // Get conditions.
- EcalConditions conditions = conditionsManager.getCachedConditions(EcalConditions.class, "ecal_conditions").getCachedData();
-
- // Load conditions onto detector.
- EcalConditionsLoader loader = new EcalConditionsLoader();
- loader.load(detector.getSubdetector("Ecal"), conditions);
-
- // Get crystals from detector.
- List<EcalCrystal> crystals = detector.getDetectorElement().findDescendants(EcalCrystal.class);
-
- // Check number of crystals.
- assertEquals("Wrong number of crystals.", CRYSTAL_COUNT_ANSWER, crystals.size());
-
- // Counter for bad channels.
- int badChannelCount = 0;
-
- // Loop over crystals.
- int ncrystals = 0;
- for (EcalCrystal crystal : crystals) {
-
- // Get DAQ information.
- int crate = crystal.getCrate();
- int slot = crystal.getSlot();
- int channel = crystal.getChannel();
-
- // Check basic validity of DAQ setup information.
- assertTrue("Crate number is out of range.", crate >= MIN_CRATE_ANSWER && crate <= MAX_CRATE_ANSWER);
- assertTrue("Slot number is out of range.", slot >= MIN_SLOT_ANSWER && slot <= MAX_SLOT_ANSWER);
- assertTrue("Channel number is out of range.", MIN_CHANNEL_ANSWER >= 0 && channel <= MAX_CHANNEL_ANSWER);
-
- // Get time dependent conditions.
- double pedestal = crystal.getPedestal();
- double noise = crystal.getNoise();
- double gain = crystal.getGain();
- boolean badChannel = crystal.isBadChannel();
-
- // Check basic validity of conditions. They should all be non-zero.
- assertTrue("Pedestal value is zero.", pedestal != 0);
- assertTrue("Noise value is zero.", noise != 0);
- assertTrue("Gain value is zero.", gain != 0);
-
- // Increment bad channel count.
- if (badChannel)
- ++badChannelCount;
-
- ++ncrystals;
- }
-
- assertEquals("The number of crystals was wrong.", CRYSTAL_COUNT, ncrystals);
-
- // Check total number of bad channels.
- assertEquals("Wrong number of bad channels.", BAD_CHANNELS_ANSWER, badChannelCount);
-
- System.out.println("Successfully loaded conditions onto " + ncrystals + " ECal crystals!");
- }
-}
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalGainCompareTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalGainCompareTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,17 +1,16 @@
package org.hps.conditions.ecal;
+import static org.hps.conditions.deprecated.EcalConditions.makePhysicalID;
+import static org.hps.conditions.deprecated.EcalConditions.physicalToGain;
import junit.framework.TestCase;
import org.hps.conditions.DatabaseConditionsManager;
import org.hps.conditions.TableConstants;
-import org.hps.conditions.config.DetectorSetup;
+import org.hps.conditions.config.TestRunReadOnlyConfiguration;
import org.hps.conditions.deprecated.CalibrationDriver;
import org.hps.conditions.ecal.EcalChannel.EcalChannelCollection;
import org.hps.conditions.ecal.EcalGain.EcalGainCollection;
-import static org.hps.conditions.deprecated.EcalConditions.makePhysicalID;
-import static org.hps.conditions.deprecated.EcalConditions.physicalToGain;
-
/**
* This is a test to compare the ECAL channel gain values between
* the old text-based conditions and the new database system, in order
@@ -23,28 +22,31 @@
DatabaseConditionsManager conditionsManager;
- static final String detectorName = "HPS-TestRun-v8-5";
- static final int runNumber = 1351;
-
public void setUp() {
- conditionsManager = new DetectorSetup(detectorName, 0).configure().setup();
+ new TestRunReadOnlyConfiguration(true);
+ conditionsManager = DatabaseConditionsManager.getInstance();
}
public void testEcalGainCompareTest() {
+ // Load the old text-based conditions for the ECAL in order to compare against database values.
CalibrationDriver calibrationDriver = new CalibrationDriver();
calibrationDriver.detectorChanged(conditionsManager.getDetectorObject());
+ // Fetch conditions from the database.
EcalGainCollection gains = conditionsManager.getConditionsData(EcalGainCollection.class, TableConstants.ECAL_GAINS);
EcalChannelCollection channels = conditionsManager.getConditionsData(EcalChannelCollection.class, TableConstants.ECAL_CHANNELS);
- for (EcalGain gain : gains) {
-
- EcalChannel channel = channels.findChannel(gain.getChannelId());
-
+
+ // Loop over the gain values and compare them with each other.
+ int nCompared = 0;
+ for (EcalGain gain : gains) {
+ EcalChannel channel = channels.findChannel(gain.getChannelId());
long physicalID = makePhysicalID(channel.getX(), channel.getY());
- double oldGainValue = physicalToGain(physicalID);
-
+ double oldGainValue = physicalToGain(physicalID);
assertEquals("The new and old gain values are different.", gain.getGain(), oldGainValue);
+ ++nCompared;
}
+ System.out.println("Compared " + nCompared + " ECAL gain values.");
+ assertEquals("Wrong number of gain values compared.", 442, nCompared);
}
}
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalLedTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/EcalLedTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -4,7 +4,7 @@
import org.hps.conditions.DatabaseConditionsManager;
import org.hps.conditions.TableConstants;
-import org.hps.conditions.config.DevDatabaseReadOnlyConfig;
+import org.hps.conditions.config.DevReadOnlyConfiguration;
import org.hps.conditions.ecal.EcalLed.EcalLedCollection;
/**
@@ -13,12 +13,9 @@
* @author Jeremy McCormick <[log in to unmask]>
*/
public class EcalLedTest extends TestCase {
-
- DevDatabaseReadOnlyConfig db = new DevDatabaseReadOnlyConfig();
-
+
public void setUp() {
- db.setup();
- db.load("HPS-TestRun-v5", 0);
+ new DevReadOnlyConfiguration().setup().load("HPS-TestRun-v5", 0);
}
public void testEcalLed() {
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/PhysicalToGainTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/ecal/PhysicalToGainTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -43,7 +43,9 @@
// Configure the loop.
loop.setLCIORecordSource(testFile);
- loop.add(new ConditionsDriver());
+ ConditionsDriver conditionsDriver = new ConditionsDriver();
+ conditionsDriver.setLoadSvtConditions(false);
+ loop.add(conditionsDriver);
loop.add(new PhysicalToGainDriver());
// Run a few events.
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtBadChannelTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtBadChannelTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -19,8 +19,7 @@
import org.lcsim.util.loop.LCSimLoop;
/**
- * This class tests that {@link org.lcsim.hps.conditions.ConditionsDriver} works
- * correctly.
+ * This class tests that the correct bad channel conditions are found for the test run.
* @author Jeremy McCormick <[log in to unmask]>
*/
public class SvtBadChannelTest extends TestCase {
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsConverterTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsConverterTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -3,27 +3,28 @@
import junit.framework.TestCase;
import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.config.DefaultTestSetup;
+import org.hps.conditions.config.DevReadOnlyConfiguration;
/**
- * This test loads and prints {@link SvtConditions}, which internally uses the
- * {@link SvtConditionsConverter}. It does not perform any assertions.
+ * This test loads and prints {@link SvtConditions} from the dev database,
+ * which internally uses the {@link SvtConditionsConverter}. It does not
+ * perform any assertions.
*
* @author Jeremy McCormick <[log in to unmask]>
*/
public class SvtConditionsConverterTest extends TestCase {
- public void setUp() {
- new DefaultTestSetup().configure().setup();
+ DatabaseConditionsManager conditionsManager;
+
+ public void setUp() {
+ new DevReadOnlyConfiguration().setup().load("HPS-Proposal2014-v7-2pt2", 0);
+ conditionsManager = DatabaseConditionsManager.getInstance();
}
/**
* Load and print all SVT conditions for a certain run number.
*/
- public void test() {
-
- DatabaseConditionsManager conditionsManager = DatabaseConditionsManager.getInstance();
-
+ public void test() {
// Get conditions and print them out.
SvtConditions svt = conditionsManager.getCachedConditions(SvtConditions.class, "svt_conditions").getCachedData();
assertNotNull(svt);
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsLoaderTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtConditionsLoaderTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,116 +0,0 @@
-package org.hps.conditions.svt;
-
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.config.DefaultTestSetup;
-import org.lcsim.detector.tracker.silicon.HpsSiSensor;
-import org.lcsim.geometry.Detector;
-
-/**
- * This test loads {@link SvtConditions} data onto the detector and then checks that all
- * channels of each sensor have non-zero data values for applicable parameters.
- *
- * @author Jeremy McCormick <[log in to unmask]>
- */
-public class SvtConditionsLoaderTest extends TestCase {
-
- /**
- * The number of bad channels that should be returned for the run. One of these is a
- * duplicate so the row count is actually 442 in the database.
- */
- private static final int BAD_CHANNELS_ANSWER = 441;
-
- /** The number of channels where pulse information is all zeroes. */
- private static final int PULSE_NOT_SET_ANSWER = 4;
-
- // Total number of sensors that should be processed.
- private static final int SENSOR_COUNT = 12800;
-
- DatabaseConditionsManager conditionsManager;
-
- public void setUp() {
- conditionsManager = new DefaultTestSetup().configure().setup();
- }
-
- /**
- * Load SVT conditions data onto the detector and perform basic checks afterwards.
- */
- public void test() {
-
- // Get the detector.
- Detector detector = conditionsManager.getCachedConditions(Detector.class, "compact.xml").getCachedData();
-
- // Get conditions.
- SvtConditions conditions = conditionsManager.getCachedConditions(SvtConditions.class, "svt_conditions").getCachedData();
-
- // Load conditions onto detector.
- SvtConditionsLoader loader = new SvtConditionsLoader();
- loader.load(detector, conditions);
-
- // Check sensor data.
- List<HpsSiSensor> sensors = detector.getDetectorElement().findDescendants(HpsSiSensor.class);
- final int nchannels = sensors.get(0).getNumberOfChannels();
- int badChannels = 0;
- int pulseNotSet = 0;
- int nsensors = 0;
- // Loop over sensors.
- for (HpsSiSensor sensor : sensors) {
- // Loop over channels.
- for (int channel = 0; channel < nchannels; channel++) {
-
- // Check that hardware information seems reasonable.
- int hybrid = sensor.getHybridNumber();
- assertTrue("Invalid hybrid value.", hybrid >= 0 && hybrid <= 2);
- int fpga = sensor.getFpgaNumber();
- assertTrue("Invalid FPGA value.", fpga >= 0 && fpga <= 6);
-
- // Check that conditions values are not zero:
- assertTrue("Gain is zero.", sensor.getGain(channel) != 0);
- assertTrue("Noise is zero.", sensor.getNoise(channel) != 0);
- assertTrue("Pedestal is zero.", sensor.getPedestal(channel) != 0);
- assertTrue("Time offset is zero.", sensor.getTimeOffset(channel) != 0);
- assertTrue("PulseParameters points to null.", sensor.getPulseParameters(channel) != null);
- double[] pulse = sensor.getPulseParameters(channel);
-
- // There are four channels in the database where these are all zeroes.
- if (pulse[0] != 0) {
- // Check pulse parameters:
- assertTrue("amplitude is zero.", pulse[0] != 0);
- assertTrue("t0 is zero.", pulse[1] != 0);
- assertTrue("tp is zero.", pulse[2] != 0);
- assertTrue("chisq is zero.", pulse[3] != 0);
- } else {
- pulseNotSet += 1;
- }
-
- // Add to bad channel count.
- if (sensor.isBadChannel(channel)) {
- ++badChannels;
- }
- ++nsensors;
- }
-
- // Check that time shift is set for the sensor. When unset, it's value will be
- // NaN.
- assertTrue("Time shift was not set.", sensor.getTimeShift() != Double.NaN);
- }
-
- // Check for correct number of sensors processed.
- assertEquals("The number of sensors was wrong.", SENSOR_COUNT, nsensors);
-
- // Check that there were at least some bad channels.
- assertTrue("Number of bad channels was zero.", badChannels != 0);
-
- // Now check the exact number of bad channels, which should be the QA set plus
- // those for run 1351.
- assertEquals("Wrong number of dead channels found.", BAD_CHANNELS_ANSWER, badChannels);
-
- // There should be exactly 4 channels where the pulse parameters are all zeroes.
- assertEquals("The number of channels for which pulse was not set is wrong.", PULSE_NOT_SET_ANSWER, pulseNotSet);
-
- System.out.println("Successfully loaded conditions data onto " + nsensors + " SVT sensors!");
- }
-}
java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt
--- java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtConfigurationTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/conditions/src/test/java/org/hps/conditions/svt/SvtConfigurationTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -6,7 +6,7 @@
import org.hps.conditions.DatabaseConditionsManager;
import org.hps.conditions.TableConstants;
-import org.hps.conditions.config.ConditionsDatabaseConfiguration;
+import org.hps.conditions.config.ResourceConfiguration;
import org.hps.conditions.svt.SvtConfiguration.SvtConfigurationCollection;
import org.jdom.Document;
import org.jdom.JDOMException;
@@ -16,7 +16,7 @@
DatabaseConditionsManager manager;
public void setUp() {
- new ConditionsDatabaseConfiguration(
+ new ResourceConfiguration(
"/org/hps/conditions/config/conditions_dev.xml",
"/org/hps/conditions/config/conditions_dev_local.properties").setup();
manager = DatabaseConditionsManager.getInstance();
java/branches/hps-java_HPSJAVA-88/detector-data/detectors/HPS-conditions-test
--- java/branches/hps-java_HPSJAVA-88/detector-data/detectors/HPS-conditions-test/detector.properties 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/detector-data/detectors/HPS-conditions-test/detector.properties 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1 +1,2 @@
-name: HPS-TestRun-v4
+name: HPS-conditions-test
+ConditionsReader: org.hps.conditions.deprecated.TestRunConditionsReader
java/branches/hps-java_HPSJAVA-88/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui
--- java/branches/hps-java_HPSJAVA-88/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -98,7 +98,8 @@
// Set the scaling settings.
ecalPanel.setScaleMinimum(0.00001);
ecalPanel.setScaleMaximum(3);
- ecalPanel.setScalingLogarithmic();
+ // ecalPanel.setScalingLogarithmic();
+ ecalPanel.setScalingLinear();
// Disable the crystals in the calorimeter panel along the beam gap.
for (int i = -23; i < 24; i++) {
java/branches/hps-java_HPSJAVA-88/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/branches/hps-java_HPSJAVA-88/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,5 +1,8 @@
package org.hps.readout.ecal;
+import hep.aida.IHistogram1D;
+import hep.aida.IHistogram2D;
+
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -8,6 +11,7 @@
import org.hps.recon.ecal.ECalUtils;
import org.hps.recon.ecal.HPSEcalCluster;
import org.lcsim.event.EventHeader;
+import org.lcsim.util.aida.AIDA;
/**
* Class <code>FADCPrimaryTriggerDriver</code> reads reconstructed
@@ -59,6 +63,37 @@
private int pairEnergySlopeCount = 0; // Track the pairs which pass the energy slope cut.
private int pairCoplanarityCount = 0; // Track the pairs which pass the coplanarity cut.
+ // ==================================================================
+ // ==== Trigger Distribution Plots ==================================
+ // ==================================================================
+ private AIDA aida = AIDA.defaultInstance();
+ IHistogram1D clusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution", 176, 0.0, 2.2);
+ IHistogram1D clusterSeedEnergy100 = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Over 100 MeV)", 176, 0.0, 2.2);
+ IHistogram1D clusterSeedEnergySingle = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
+ IHistogram1D clusterSeedEnergyAll = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+ IHistogram1D clusterHitCount = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution", 9, 1, 10);
+ IHistogram1D clusterHitCount100 = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Over 100 MeV)", 9, 1, 10);
+ IHistogram1D clusterHitCountSingle = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed Single Cuts)", 9, 1, 10);
+ IHistogram1D clusterHitCountAll = aida.histogram1D("Trigger Plots :: Cluster Hit Count Distribution (Passed All Cuts)", 9, 1, 10);
+ IHistogram1D clusterTotalEnergy = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution", 176, 0.0, 2.2);
+ IHistogram1D clusterTotalEnergy100 = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Over 100 MeV)", 176, 0.0, 2.2);
+ IHistogram1D clusterTotalEnergySingle = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed Single Cuts)", 176, 0.0, 2.2);
+ IHistogram1D clusterTotalEnergyAll = aida.histogram1D("Trigger Plots :: Cluster Total Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+
+ IHistogram1D pairEnergySum = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution", 176, 0.0, 4.4);
+ IHistogram1D pairEnergySumAll = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution (Passed All Cuts)", 176, 0.0, 4.4);
+ IHistogram1D pairEnergyDifference = aida.histogram1D("Trigger Plots :: Pair Energy Difference Distribution", 176, 0.0, 2.2);
+ IHistogram1D pairEnergyDifferenceAll = aida.histogram1D("Trigger Plots :: Pair Energy Difference Distribution (Passed All Cuts)", 176, 0.0, 2.2);
+ IHistogram1D pairCoplanarity = aida.histogram1D("Trigger Plots :: Pair Coplanarity Distribution", 360, 0.0, 180.0);
+ IHistogram1D pairCoplanarityAll = aida.histogram1D("Trigger Plots :: Pair Coplanarity Distribution (Passed All Cuts)", 360, 0.0, 180.0);
+ IHistogram1D pairEnergySlope = aida.histogram1D("Trigger Plots :: Pair Energy Slope Distribution", 400, 0.0, 4.0);
+ IHistogram1D pairEnergySlopeAll = aida.histogram1D("Trigger Plots :: Pair Energy Slope Distribution (Passed All Cuts)", 400, 0.0, 4.0);
+
+ IHistogram2D clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 44, -22.0, 22.0, 10, -5, 5);
+ IHistogram2D clusterDistribution100 = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Over 100 MeV)", 44, -23, 23, 11, -5.5, 5.5);
+ IHistogram2D clusterDistributionSingle = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+ IHistogram2D clusterDistributionAll = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+
/**
* Prints out the results of the trigger at the end of the run.
*/
@@ -107,6 +142,28 @@
// Increment the number of processed clusters.
allClusters++;
+ // Get the cluster plot values.
+ int hitCount = cluster.getCalorimeterHits().size();
+ double seedEnergy = cluster.getSeedHit().getCorrectedEnergy();
+ double clusterEnergy = cluster.getEnergy();
+ int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
+ int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
+ if(ix > 0) { ix = ix - 1; }
+
+ // Fill the general plots.
+ clusterSeedEnergy.fill(seedEnergy, 1);
+ clusterTotalEnergy.fill(clusterEnergy, 1);
+ clusterHitCount.fill(hitCount, 1);
+ clusterDistribution.fill(ix, iy, 1);
+
+ // Fill the "over 100 MeV" plots if applicable.
+ if(seedEnergy >= 0.100) {
+ clusterSeedEnergy100.fill(seedEnergy, 1);
+ clusterTotalEnergy100.fill(clusterEnergy, 1);
+ clusterHitCount100.fill(hitCount, 1);
+ clusterDistribution100.fill(ix, iy, 1);
+ }
+
// ==== Seed Hit Energy Cut ====================================
// =============================================================
// If the cluster fails the cut, skip to the next cluster.
@@ -131,6 +188,12 @@
// Otherwise, note that it passed the cut.
clusterTotalEnergyCount++;
+ // Fill the "passed single cuts" plots.
+ clusterSeedEnergySingle.fill(seedEnergy, 1);
+ clusterTotalEnergySingle.fill(clusterEnergy, 1);
+ clusterHitCountSingle.fill(hitCount, 1);
+ clusterDistributionSingle.fill(ix, iy, 1);
+
// A cluster that passes all of the single-cluster cuts
// can be used in cluster pairs.
goodClusterList.add(cluster);
@@ -486,8 +549,6 @@
double[] clusterAngle = new double[2];
for(int i = 0; i < 2; i++) {
double position[] = clusterPair[i].getSeedHit().getPosition();
- //clusterAngle[i] = Math.toDegrees(Math.atan2(position[1], position[0] - originX));
- //clusterAngle[i] = (clusterAngle[i] + 180.0) % 180.0;
clusterAngle[i] = (Math.toDegrees(Math.atan2(position[1], position[0] - originX)) + 180.0) % 180.0;
}
@@ -711,6 +772,18 @@
// Increment the number of processed cluster pairs.
allPairs++;
+ // Get the plot values for the pair cuts.
+ double energySum = getValueEnergySum(clusterPair);
+ double energyDifference = getValueEnergyDifference(clusterPair);
+ double energySlope = getValueEnergySlope(clusterPair);
+ double coplanarity = getValueCoplanarity(clusterPair);
+
+ // Fill the general plots.
+ pairEnergySum.fill(energySum, 1);
+ pairEnergyDifference.fill(energyDifference, 1);
+ pairEnergySlope.fill(energySlope, 1);
+ pairCoplanarity.fill(coplanarity, 1);
+
// ==== Pair Energy Sum Cut ====================================
// =============================================================
// If the cluster fails the cut, skip to the next pair.
@@ -744,6 +817,35 @@
// Otherwise, note that it passed the cut.
pairCoplanarityCount++;
+ // Get the cluster plot values.
+ int[] hitCount = new int[2];
+ double[] seedEnergy = new double[2];
+ double[] clusterEnergy = new double[2];
+ int[] ix = new int[2];
+ int[] iy = new int[2];
+ for(int i = 0; i < 2; i++) {
+ hitCount[i] = clusterPair[i].getCalorimeterHits().size();
+ seedEnergy[i] = clusterPair[i].getSeedHit().getCorrectedEnergy();
+ clusterEnergy[i] = clusterPair[i].getEnergy();
+ ix[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("ix");
+ iy[i] = clusterPair[i].getSeedHit().getIdentifierFieldValue("iy");
+ if(ix[i] > 0) { ix[i] = ix[i] - 1; }
+ }
+
+ // Fill the general plots.
+ for(int i = 0; i < 2; i++) {
+ clusterSeedEnergyAll.fill(seedEnergy[i], 1);
+ clusterTotalEnergyAll.fill(clusterEnergy[i], 1);
+ clusterHitCountAll.fill(hitCount[i], 1);
+ clusterDistributionAll.fill(ix[i], iy[i], 1);
+ }
+
+ // Fill the "passed all cuts" plots.
+ pairEnergySumAll.fill(energySum, 1);
+ pairEnergyDifferenceAll.fill(energyDifference, 1);
+ pairEnergySlopeAll.fill(energySlope, 1);
+ pairCoplanarityAll.fill(coplanarity, 1);
+
// Clusters that pass all of the pair cuts produce a trigger.
return true;
}
java/branches/hps-java_HPSJAVA-88/ecal-readout-sim/src/main/java/org/hps/readout/ecal
--- java/branches/hps-java_HPSJAVA-88/ecal-readout-sim/src/main/java/org/hps/readout/ecal/NeutralPionTriggerDriver.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/ecal-readout-sim/src/main/java/org/hps/readout/ecal/NeutralPionTriggerDriver.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -45,7 +45,7 @@
* it will output more details with every event to help with diagnostics.
*
* @author Kyle McCarty
- * @author Michel Garcon
+ * @author Michel Garçon
*/
public class NeutralPionTriggerDriver extends TriggerDriver {
@@ -53,6 +53,29 @@
// ==== Trigger Algorithms ==========================================
// ==================================================================
+ @Override
+ public void endOfData() {
+ // Print out the results of the trigger cuts.
+ System.out.printf("Trigger Processing Results%n");
+ System.out.printf("\tSingle-Cluster Cuts%n");
+ System.out.printf("\t\tTotal Clusters Processed :: %d%n", allClusters);
+ System.out.printf("\t\tPassed Seed Energy Cut :: %d%n", clusterSeedEnergyCount);
+ System.out.printf("\t\tPassed Hit Count Cut :: %d%n", clusterHitCountCount);
+ if(rejectEdgeCrystals) {
+ System.out.printf("\t\tPassed Edge Crystal Cut :: %d%n", clusterEdgeCount);
+ }
+ System.out.printf("%n");
+ System.out.printf("\tCluster Pair Cuts%n");
+ System.out.printf("\t\tTotal Pairs Processed :: %d%n", allPairs);
+ System.out.printf("\t\tPassed Energy Sum Cut :: %d%n", pairEnergySumCount);
+ System.out.printf("\t\tPassed Energy Invariant Mass :: %d%n", pairInvariantMassCount);
+ System.out.printf("%n");
+ System.out.printf("\tTrigger Count :: %d%n", triggers);
+
+ // Run the superclass method.
+ super.endOfData();
+ }
+
public void process(EventHeader event) {
// Generate a temporary list to store the good clusters
// in before they are added to the buffer.
@@ -74,6 +97,12 @@
// if they pass the minimum total cluster energy and seed
// energy thresholds.
for(HPSEcalCluster cluster : eventList) {
+ // Increment the clusters processed count.
+ allClusters++;
+
+ // Plot the seed energy / cluster energy histogram.
+ seedPercent.fill(cluster.getSeedHit().getCorrectedEnergy() / cluster.getEnergy(), 1);
+
// Get the cluster position indices.
int ix = cluster.getSeedHit().getIdentifierFieldValue("ix");
int iy = cluster.getSeedHit().getIdentifierFieldValue("iy");
@@ -93,7 +122,7 @@
// VERBOSE :: Output the single cluster trigger thresholds.
if(verbose) {
System.out.printf("\tCluster seed energy threshold :: [%f, %f]%n", clusterSeedEnergyThresholdLow, clusterSeedEnergyThresholdHigh);
- System.out.printf("\tCluster total energy threshold :: %f%n%n", clusterTotalEnergyThreshold);
+ System.out.printf("\tCluster total energy threshold :: %f%n%n", clusterTotalEnergyThresholdLow);
}
// Perform the single cluster cuts.
@@ -102,6 +131,17 @@
boolean hitCountCut = clusterHitCountCut(cluster);
boolean edgeCrystalCut = isEdgeCluster(cluster);
+ // Increment the single cut counts.
+ if(seedEnergyCut) {
+ clusterSeedEnergyCount++;
+ if(hitCountCut) {
+ clusterHitCountCount++;
+ if(rejectEdgeCrystals && edgeCrystalCut) {
+ clusterEdgeCount++;
+ }
+ }
+ }
+
// VERBOSE :: Note whether the cluster passed the single
// cluster cuts.
if(verbose) {
@@ -157,19 +197,6 @@
if(verbose) { System.out.println("No cluster collection is present for event.\n"); }
}
- /**
- // If the cluster buffer has fewer than the allowed number of
- // events stored, just add the temporary list to the buffer.
- if(clusterBuffer.size() < coincidenceWindow) { clusterBuffer.addLast(tempList); }
-
- // Otherwise, remove the first element of the list (the oldest
- // buffer) and append the new list.
- else {
- clusterBuffer.removeFirst();
- clusterBuffer.addLast(tempList);
- }
- **/
-
// Reset the highest energy pair to null.
clusterTriplet[0] = null;
clusterTriplet[1] = null;
@@ -245,9 +272,9 @@
aClusterSeedEnergy = aida.histogram1D("Trigger Plots :: Cluster Seed Energy Distribution (Passed All Cuts)", 176, 0.0, 2.2);
// Initialize the seed distribution diagnostic plots.
- clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 176, -22.0, 22.0, 10, -5, 5);
- pClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 176, -23, 23, 11, -5.5, 5.5);
- aClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 176, -23, 23, 11, -5.5, 5.5);
+ clusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution", 44, -22.0, 22.0, 10, -5, 5);
+ pClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed Single Cuts)", 44, -23, 23, 11, -5.5, 5.5);
+ aClusterDistribution = aida.histogram2D("Trigger Plots :: Cluster Seed Distribution (Passed All Cuts)", 44, -23, 23, 11, -5.5, 5.5);
// Initialize the cluster pair energy sum diagnostic plots.
pairEnergySum = aida.histogram1D("Trigger Plots :: Pair Energy Sum Distribution", 176, 0.0, 2.2);
@@ -256,6 +283,9 @@
// Initialize the cluster pair hypothetical invariant mass diagnostic plots.
invariantMass = aida.histogram1D("Trigger Plots :: Invariant Mass Distribution", 1500, 0.0, 0.03);
pInvariantMass = aida.histogram1D("Trigger Plots :: Invariant Mass Distribution (Passed Pair Cuts)", 1500, 0.0, 0.03);
+
+ // Initialize the seed percentage of cluster energy.
+ seedPercent = aida.histogram1D("Analysis Plots :: Seed Percentage of Total Energy", 400, 0.0, 1.0);
}
protected boolean triggerDecision(EventHeader event) {
@@ -282,6 +312,9 @@
return false;
}
+ // Increment the number of pairs considered.
+ allPairs++;
+
// Get the cluster position indices.
int[] ix = { clusterPair[0].getSeedHit().getIdentifierFieldValue("ix"), clusterPair[1].getSeedHit().getIdentifierFieldValue("ix") };
int[] iy = { clusterPair[0].getSeedHit().getIdentifierFieldValue("iy"), clusterPair[1].getSeedHit().getIdentifierFieldValue("iy") };
@@ -313,6 +346,14 @@
boolean energySumCut = pairEnergySumCut(clusterPair);
boolean invariantMassCut = pairInvariantMassCut(clusterPair);
+ // Increment the pair cut counts.
+ if(energySumCut) {
+ pairEnergySumCount++;
+ if(invariantMassCut) {
+ pairInvariantMassCount++;
+ }
+ }
+
// VERBOSE :: Note the outcome of the trigger cuts.
if(verbose) {
System.out.printf("\tPassed energy sum cut :: %b%n", energySumCut);
@@ -338,6 +379,9 @@
// VERBOSE :: Note that the event has triggered.
if(verbose) { System.out.println("Event triggers!\n\n"); }
+ // Increment the number of triggers.
+ triggers++;
+
// Return the trigger.
return true;
}
@@ -411,7 +455,11 @@
* false</code> if it does not.
*/
private boolean clusterTotalEnergyCut(HPSEcalCluster cluster) {
- return cluster.getEnergy() >= clusterTotalEnergyThreshold;
+ // Get the cluster energy.
+ double clusterEnergy = cluster.getEnergy();
+
+ // Perform the cut.
+ return clusterEnergy >= clusterTotalEnergyThresholdLow && clusterEnergy <= clusterTotalEnergyThresholdHigh;
}
/**
@@ -666,14 +714,25 @@
* Sets the threshold for the total cluster energy of individual
* clusters under which the cluster will be rejected and not used
* for triggering.
- * @param clusterTotalEnergyThreshold - The cluster total energy
+ * @param clusterTotalEnergyThresholdLow - The cluster total energy
* lower bound.
*/
- public void setClusterTotalEnergyThreshold(double clusterTotalEnergyThreshold) {
- this.clusterTotalEnergyThreshold = clusterTotalEnergyThreshold;
+ public void setClusterTotalEnergyThresholdLow(double clusterTotalEnergyThresholdLow) {
+ this.clusterTotalEnergyThresholdLow = clusterTotalEnergyThresholdLow;
}
/**
+ * Sets the threshold for the total cluster energy of individual
+ * clusters above which the cluster will be rejected and not used
+ * for triggering.
+ * @param clusterTotalEnergyThresholdHigh - The cluster total energy
+ * upper bound.
+ */
+ public void setClusterTotalEnergyThresholdHigh(double clusterTotalEnergyThresholdHigh) {
+ this.clusterTotalEnergyThresholdHigh = clusterTotalEnergyThresholdHigh;
+ }
+
+ /**
* Sets the number of events that clusters will be retained and
* employed for triggering before they are cleared.
* @param coincidenceWindow - The number of events that clusters
@@ -684,6 +743,18 @@
}
/**
+ * Sets the invariant mass threshold to accept only cluster pairs
+ * with a reconstructed invariant mass within a certain number of
+ * standard deviations of the mean (corrected for sampling fraction).
+ * @param invariantMassSigma - The number of standard deviations
+ * within which a cluster pair invariant mass is accepted.
+ */
+ public void setInvariantMassSigma(int invariantMassSigma) {
+ this.invariantMassThresholdLow = 0.012499 - (invariantMassSigma * 0.0011095);
+ this.invariantMassThresholdHigh = 0.012499 + (invariantMassSigma * 0.0011095);
+ }
+
+ /**
* Sets the threshold for the calculated invariant mass of the
* generating particle (assuming that the clusters are produced
* by a positron/electron pair) above which the cluster pair will
@@ -711,7 +782,7 @@
* Sets the threshold for the sum of the energies of a cluster pair
* above which the pair will be rejected and not produce a trigger.
* @param pairEnergySumThresholdHigh - The cluster pair energy sum
- * lower bound.
+ * upper bound.
*/
public void setPairEnergySumThresholdHigh(double pairEnergySumThresholdHigh) {
this.pairEnergySumThresholdHigh = pairEnergySumThresholdHigh;
@@ -728,6 +799,18 @@
}
/**
+ * Sets whether clusters centered on an edge crystal should be
+ * used for triggering or not.
+ *
+ * @param rejectEdgeCrystals - <code>true</code> means that edge
+ * clusters will not be used and <code>false</code> means that they
+ * will be used.
+ */
+ public void setRejectEdgeCrystals(boolean rejectEdgeCrystals) {
+ this.rejectEdgeCrystals = rejectEdgeCrystals;
+ }
+
+ /**
* Sets the threshold for the sum of the energies of a cluster triplet
* under which the triplet will be rejected and not produce a trigger.
* @param tripletEnergySumThreshold - The cluster triplet energy sum
@@ -799,6 +882,7 @@
IHistogram1D pClusterTotalEnergy;
IHistogram1D pPairEnergySum;
IHistogram1D pInvariantMass;
+ IHistogram1D seedPercent;
// ==================================================================
// ==== Variables ===================================================
@@ -809,7 +893,7 @@
* <code>private AIDA <b>aida</b></code><br/><br/>
* Factory for generating histograms.
*/
- private AIDA aida = AIDA.defaultInstance();
+ private AIDA aida = AIDA.defaultInstance();
/**
* <b>clusterBuffer</b><br/><br/>
@@ -836,7 +920,7 @@
* cluster first in the array.
*/
private HPSEcalCluster[] clusterPair = new HPSEcalCluster[2];
-
+
/**
* <b>clusterHitCountThreshold</b><br/><br/>
* <code>private int <b>clusterHitCountThreshold</b></code><br/><br/>
@@ -862,14 +946,22 @@
private double clusterSeedEnergyThresholdHigh = 1.00;
/**
- * <b>clusterTotalEnergyThreshold</b><br/><br/>
+ * <b>clusterTotalEnergyThresholdLow</b><br/><br/>
* <code>private double <b>clusterTotalEnergyThreshold</b></code><br/><br/>
* Defines the threshold for the total cluster energy under which
* a cluster will be rejected.
*/
- private double clusterTotalEnergyThreshold = Double.MIN_VALUE;
+ private double clusterTotalEnergyThresholdLow = 0.0;
/**
+ * <b>clusterTotalEnergyThresholdHigh</b><br/><br/>
+ * <code>private double <b>clusterTotalEnergyThresholdHigh</b></code><br/><br/>
+ * Defines the threshold for the total cluster energy above which
+ * a cluster will be rejected.
+ */
+ private double clusterTotalEnergyThresholdHigh = Double.MAX_VALUE;
+
+ /**
* <b>clusterTriplet</b><br/><br/>
* <code>private HPSEcalCluster[] <b>clusterTriplet</b></code><br/><br/>
* Stores the three highest energy clusters located in the cluster
@@ -884,14 +976,14 @@
* The number of events for which clusters will be retained and
* used in the trigger before they are removed.
*/
- private int coincidenceWindow = 3;
-
+ private int coincidenceWindow = 3;
+
/**
* <b>D2</b><br/><br/>
* <code>private static final double <b>D2</b></code><br/><br/>
* The squared distance of the calorimeter from the target.
*/
- private static final double D2 = 1414 * 1414; // (1414^2 mm^2)
+ private static final double D2 = 1414 * 1414; // (1414^2 mm^2)
/**
* <b>invariantMassThresholdHigh</b><br/><br/>
@@ -977,4 +1069,13 @@
* invariant mass calculations.
*/
private Map<CalorimeterHit, Double[]> seedPosMap = new HashMap<CalorimeterHit, Double[]>();
+
+ private int triggers = 0; // Track the number of triggers.
+ private int allClusters = 0; // Track the number of clusters processed.
+ private int allPairs = 0; // Track the number of cluster pairs processed.
+ private int clusterSeedEnergyCount = 0; // Track the clusters which pass the seed energy cut.
+ private int clusterHitCountCount = 0; // Track the clusters which pass the hit count cut.
+ private int clusterEdgeCount = 0; // Track the clusters which pass the edge cut.
+ private int pairEnergySumCount = 0; // Track the pairs which pass the energy sum cut.
+ private int pairInvariantMassCount = 0; // Track the pairs which pass the invariant mass cut.
}
java/branches/hps-java_HPSJAVA-88/ecal-recon/src/main/java/org/hps/recon/ecal
--- java/branches/hps-java_HPSJAVA-88/ecal-recon/src/main/java/org/hps/recon/ecal/ECalUtils.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/ecal-recon/src/main/java/org/hps/recon/ecal/ECalUtils.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -74,4 +74,31 @@
}
}
}
+
+
+    /* These methods were added by A. Celentano. They are mostly used in the monitoring drivers related to the ECal;
+     * however, rather than keeping them in a separate "EcalMonitoringUtils" class, it seems better to have them here.
+ */
+ public static int getRowFromHistoID(int id){
+ return (5-(id%11));
+ }
+
+ public static int getColumnFromHistoID(int id){
+ return ((id/11)-23);
+ }
+
+ public static int getHistoIDFromRowColumn(int row,int column){
+ return (-row+5)+11*(column+23);
+ }
+
+ public static Boolean isInHole(int row,int column){
+ Boolean ret;
+ ret=false;
+ if ((row==1)||(row==-1)){
+ if ((column<=-2)&&(column>=-10)) ret=true;
+ }
+ return ret;
+ }
+
+
}
java/branches/hps-java_HPSJAVA-88/evio
--- java/branches/hps-java_HPSJAVA-88/evio/pom.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/evio/pom.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -16,13 +16,7 @@
<developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/evio/</developerConnection>
</scm>
<dependencies>
-<!--
<dependency>
- <groupId>org.hps</groupId>
- <artifactId>hps-jevio</artifactId>
- </dependency>
--->
- <dependency>
<groupId>org.jlab.coda</groupId>
<artifactId>jevio</artifactId>
<version>4.3.1</version>
java/branches/hps-java_HPSJAVA-88/integration-tests/src/test/java/org/hps
--- java/branches/hps-java_HPSJAVA-88/integration-tests/src/test/java/org/hps/EtSystemTest.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/integration-tests/src/test/java/org/hps/EtSystemTest.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -50,7 +50,7 @@
static final String javaPath = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java";
static final String etBuffer = "ETBuffer";
static final int port = 11111;
- static final int waitTime = 50000000; /* Wait time in microseconds. */
+ static final int waitTime = 5000000; /* Wait time in microseconds. */
static final int chunkSize = 1;
static List<Process> processes = new ArrayList<Process>();
static final int minimumEventsExpected = 5000;
@@ -115,15 +115,18 @@
etProcess.destroy();
// Now wait for the station process to die from the ET ring going down which will cause an EOFException.
- int stationProcessReturnCode = 0;
+ //int stationProcessReturnCode = 0;
try {
- stationProcessReturnCode = etStationProcess.waitFor();
+ //stationProcessReturnCode =
+ etStationProcess.waitFor();
} catch (InterruptedException e) {
e.printStackTrace();
}
- assertEquals("The station process returned a non-zero exit status.", 0, stationProcessReturnCode);
+ //System.out.println("state")
+ //assertEquals("The station process returned a non-zero exit status.", 0, stationProcessReturnCode);
+
// Clear the list of active processes.
processes.clear();
}
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/Commands.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/Commands.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -12,13 +12,14 @@
static final String STEERING_RESOURCE_CHANGED = "steeringResourceChanged";
static final String LOG_TO_FILE_CHANGED = "logToFileChanged";
static final String AIDA_AUTO_SAVE_CHANGED = "aidaAutoSaveChanged";
- static final String LOG_LEVEL_CHANGED = "logLevelChanged";
+ static final String LOG_LEVEL_CHANGED = "logLevelChanged";
static final String BLOCKING_CHANGED = "blockingChanged";
static final String VERBOSE_CHANGED = "verboseChanged";
static final String WAIT_MODE_CHANGED = "waitModeChanged";
static final String DATA_SOURCE_TYPE_CHANGED = "dataSourceTypeChanged";
+ static final String PROCESSING_STAGE_CHANGED = "processingStageChanged";
static final String AIDA_AUTO_SAVE = "aidaAutoSave";
static final String CLEAR_LOG_TABLE = "clearLogTable";
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/DataSourcePanel.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/DataSourcePanel.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,8 +1,10 @@
package org.hps.monitoring.gui;
import static org.hps.monitoring.gui.Commands.DATA_SOURCE_TYPE_CHANGED;
+import static org.hps.monitoring.gui.Commands.PROCESSING_STAGE_CHANGED;
import static org.hps.monitoring.gui.model.ConfigurationModel.DATA_SOURCE_PATH_PROPERTY;
import static org.hps.monitoring.gui.model.ConfigurationModel.DATA_SOURCE_TYPE_PROPERTY;
+import static org.hps.monitoring.gui.model.ConfigurationModel.PROCESSING_STAGE_PROPERTY;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
@@ -15,7 +17,8 @@
import javax.swing.JTextField;
import org.hps.monitoring.gui.model.ConfigurationModel;
-import org.hps.record.processing.DataSourceType;
+import org.hps.record.enums.DataSourceType;
+import org.hps.record.enums.ProcessingStage;
/**
* A sub-panel of the settings window for selecting a data source,
@@ -23,14 +26,21 @@
*/
class DataSourcePanel extends AbstractFieldsPanel {
- static String[] dataSourceTypes = {
+ static final String[] dataSourceTypes = {
DataSourceType.ET_SERVER.description(),
DataSourceType.EVIO_FILE.description(),
DataSourceType.LCIO_FILE.description()
};
+ static final String[] processingStages = {
+ ProcessingStage.ET.name(),
+ ProcessingStage.EVIO.name(),
+ ProcessingStage.LCIO.name()
+ };
+
JComboBox<?> dataSourceTypeComboBox;
JTextField dataSourcePathField;
+ JComboBox<?> processingStageComboBox;
ConfigurationModel configurationModel;
@@ -43,26 +53,14 @@
dataSourceTypeComboBox.addActionListener(this);
dataSourcePathField = addField("Data Source Path", 40);
- //dataSourcePathField.setEditable(false);
- //dataSourcePathField.addPropertyChangeListener("value", this);
- dataSourcePathField.addPropertyChangeListener(this);
- //dataSourcePathField.addPropertyChangeListener(new DummyPropertyChangeListener());
- //dataSourcePathField.addPropertyChangeListener("value", new DummyPropertyChangeListener());
+ dataSourcePathField.addPropertyChangeListener(this);
+
+ processingStageComboBox = addComboBox("Processing Stage", processingStages);
+ processingStageComboBox.setSelectedIndex(2);
+ processingStageComboBox.setActionCommand(PROCESSING_STAGE_CHANGED);
+ processingStageComboBox.addActionListener(this);
}
-
- /*
- class DummyPropertyChangeListener implements PropertyChangeListener {
-
- @Override
- public void propertyChange(PropertyChangeEvent evt) {
- System.out.println("DummyPropertyChangeListener.propertyChange");
- System.out.println(" source: " + evt.getSource());
- System.out.println(" name: " + evt.getPropertyName());
- System.out.println(" value: " + evt.getNewValue());
- }
- }
- */
-
+
private void chooseFile() {
JFileChooser fc = new JFileChooser(System.getProperty("user.dir"));
fc.setDialogTitle("Select Data Source");
@@ -101,6 +99,9 @@
if (dataSourceType.isFile()) {
chooseFile();
}
+ } else if (PROCESSING_STAGE_CHANGED.equals(e.getActionCommand())) {
+ ProcessingStage processingStage = ProcessingStage.values()[processingStageComboBox.getSelectedIndex()];
+ configurationModel.setProcessingStage(processingStage);
}
}
@@ -129,6 +130,8 @@
dataSourceTypeComboBox.setSelectedItem(value.toString());
} else if (DATA_SOURCE_PATH_PROPERTY.equals(evt.getPropertyName())) {
dataSourcePathField.setText((String) value);
+ } else if (PROCESSING_STAGE_PROPERTY.equals(evt.getPropertyName())) {
+ processingStageComboBox.setSelectedItem(value.toString());
}
}
}
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/JobSettingsPanel.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/JobSettingsPanel.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -159,13 +159,15 @@
* Enable this component.
* @param enable Whether to enable or not.
*/
+ /*
void enableJobPanel(boolean enable) {
detectorNameField.setEnabled(enable);
eventBuilderField.setEnabled(enable);
steeringTypeComboBox.setEnabled(enable);
steeringFileField.setEnabled(enable);
steeringResourcesComboBox.setEnabled(enable);
- }
+ }
+ */
/**
* Attaches the ActionListener from the main app to specific GUI components in this class.
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/MonitoringApplication.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/MonitoringApplication.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -63,6 +63,7 @@
import javax.swing.SwingUtilities;
import javax.swing.table.DefaultTableModel;
+import org.freehep.record.loop.RecordLoop.Command;
import org.hps.evio.LCSimEventBuilder;
import org.hps.monitoring.enums.ConnectionStatus;
import org.hps.monitoring.enums.SteeringType;
@@ -77,11 +78,11 @@
import org.hps.monitoring.subsys.SystemStatusRegistry;
import org.hps.monitoring.subsys.et.EtSystemMonitor;
import org.hps.monitoring.subsys.et.EtSystemStripCharts;
+import org.hps.record.composite.CompositeLoop;
+import org.hps.record.composite.CompositeLoopConfiguration;
+import org.hps.record.composite.EventProcessingThread;
+import org.hps.record.enums.DataSourceType;
import org.hps.record.et.EtConnection;
-import org.hps.record.processing.DataSourceType;
-import org.hps.record.processing.ProcessingChain;
-import org.hps.record.processing.ProcessingConfiguration;
-import org.hps.record.processing.ProcessingThread;
import org.jlab.coda.et.EtAttachment;
import org.jlab.coda.et.EtConstants;
import org.jlab.coda.et.EtStation;
@@ -126,8 +127,8 @@
// Event processing objects.
private JobControlManager jobManager;
private LCSimEventBuilder eventBuilder;
- private ProcessingChain processingChain;
- private ProcessingThread processingThread;
+ private CompositeLoop loop;
+ private EventProcessingThread processingThread;
private Thread sessionWatchdogThread;
// Logging objects.
@@ -702,7 +703,7 @@
getConnectionSettingsPanel().enableConnectionPanel(true);
// Re-enable the getJobPanel().
- getJobSettingsPanel().enableJobPanel(true);
+ //getJobSettingsPanel().enableJobPanel(true);
// Set relevant event panel buttons to disabled.
buttonsPanel.enablePauseButton(false);
@@ -725,7 +726,7 @@
getConnectionSettingsPanel().enableConnectionPanel(false);
// Disable getJobPanel().
- getJobSettingsPanel().enableJobPanel(false);
+ //getJobSettingsPanel().enableJobPanel(false);
// Enable or disable appropriate menu items.
savePlotsItem.setEnabled(true);
@@ -824,7 +825,7 @@
connect();
// Setup the EventProcessingChain object using the EtConnection.
- setupEventProcessingChain();
+ setupCompositeLoop();
// Setup the system status monitor table.
setupSystemStatusMonitor();
@@ -1090,7 +1091,7 @@
*/
private void nextEvent() {
this.setConnectionStatus(ConnectionStatus.CONNECTED);
- processingChain.next();
+ loop.execute(Command.GO_N, 1L, true);
log(Level.FINEST, "Getting next event.");
this.setConnectionStatus(ConnectionStatus.PAUSED);
}
@@ -1100,7 +1101,7 @@
*/
private void resumeEventProcessing() {
// Notify event processor to continue.
- processingChain.resume();
+ loop.resume();
// Set state of event buttons.
buttonsPanel.setPauseModeState(false);
@@ -1115,7 +1116,7 @@
*/
private void pauseEventProcessing() {
- processingChain.pause();
+ loop.pause();
// Set GUI state.
buttonsPanel.setPauseModeState(true);
@@ -1154,40 +1155,46 @@
/**
* Configure the event processing chain.
*/
- private void setupEventProcessingChain() {
+ private void setupCompositeLoop() {
- ProcessingConfiguration processingConfiguration = new ProcessingConfiguration();
-
- processingConfiguration.setStopOnEndRun(configurationModel.getDisconnectOnEndRun());
- processingConfiguration.setStopOnErrors(configurationModel.getDisconnectOnError());
- processingConfiguration.setDataSourceType(configurationModel.getDataSourceType());
- processingConfiguration.setEtConnection(connection);
- processingConfiguration.setFilePath(configurationModel.getDataSourcePath());
- processingConfiguration.setLCSimEventBuild(eventBuilder);
- processingConfiguration.setDetectorName(configurationModel.getDetectorName());
+ CompositeLoopConfiguration loopConfig = new CompositeLoopConfiguration()
+ .setStopOnEndRun(configurationModel.getDisconnectOnEndRun())
+ .setStopOnErrors(configurationModel.getDisconnectOnError())
+ .setDataSourceType(configurationModel.getDataSourceType())
+ .setProcessingStage(configurationModel.getProcessingStage())
+ .setEtConnection(connection)
+ .setFilePath(configurationModel.getDataSourcePath())
+ .setLCSimEventBuilder(eventBuilder)
+ .setDetectorName(configurationModel.getDetectorName());
// Add all Drivers from the pre-configured JobManager.
for (Driver driver : jobManager.getDriverExecList()) {
- processingConfiguration.add(driver);
+ loopConfig.add(driver);
}
-
- // ET system monitor.
- processingConfiguration.add(new EtSystemMonitor());
+
+ // Using ET server?
+ if (usingEtServer()) {
+
+ // ET system monitor.
+ // FIXME: Make whether this is run or not configurable through the JobPanel.
+ loopConfig.add(new EtSystemMonitor());
- // ET system strip charts.
- processingConfiguration.add(new EtSystemStripCharts());
+ // ET system strip charts.
+ // FIXME: Make whether this is run or not configurable through the JobPanel.
+ loopConfig.add(new EtSystemStripCharts());
+ }
// RunPanel updater.
- processingConfiguration.add(runPanel.new RunModelUpdater());
+ loopConfig.add(runPanel.new RunModelUpdater());
- // Create the ProcessingChain object.
- processingChain = new ProcessingChain(processingConfiguration);
+ // Create the CompositeLoop with the configuration.
+ loop = new CompositeLoop(loopConfig);
// Create the processing thread.
- processingThread = new ProcessingThread(processingChain);
+ processingThread = new EventProcessingThread(loop);
// Start the processing thread.
- processingThread.start();
+ processingThread.start();
}
/**
@@ -1265,18 +1272,21 @@
try {
// Log message.
logger.log(Level.FINER, "Stopping the session.");
-
- // Terminate event processing.
- stopEventProcessing();
-
+
// Save AIDA file.
saveAidaFile();
// Disconnect from the ET system.
- if (usingEtServer())
+ if (usingEtServer()) {
+ // Disconnect from the ET system.
disconnect();
- else
- setDisconnectedGuiState();
+ } else {
+ // When using direct file streaming, just need to toggle GUI state.
+ setDisconnectedGuiState();
+ }
+
+ // Terminate event processing.
+ stopEventProcessing();
logger.log(Level.INFO, "Session was stopped.");
@@ -1331,16 +1341,25 @@
* In this case, event processing will exit later when the ET system goes down.
*/
private void stopEventProcessing() {
+ //System.out.println("MonitoringApplication.stopEventProcessing");
if (processingThread != null) {
+ //System.out.println("processingThread not null");
// Is the event processing thread actually still alive?
if (processingThread.isAlive()) {
+
+ //System.out.println("processing thread is alive...");
+ //System.out.println("killing session watchdog");
// Interrupt and kill the event processing watchdog thread if necessary.
killSessionWatchdogThread();
+
+ //System.out.println("stopping event processing chain...");
- // Request the event processing to stop.
- processingChain.stop();
+ // Request the event processing to stop.
+ loop.execute(Command.STOP);
+
+ //System.out.println("requested stop of event processing");
}
// Wait for the event processing thread to finish. This should just return
@@ -1349,19 +1368,26 @@
// In the case where ET is configured for sleep or timed wait, an untimed join could
// block forever, so only wait for ~1 second before continuing. The EventProcessingChain
// should still cleanup automatically when its thread completes after the ET system goes down.
+ //System.out.println("joining event processing thread...");
processingThread.join(1000);
+ //System.out.println("joined event processing thread!");
} catch (InterruptedException e) {
// Don't know when this would ever happen.
+ //System.out.println("join was interrupted!");
}
// Handle last error that occurred in event processing.
- if (processingChain.getLastError() != null) {
- errorHandler.setError(processingChain.getLastError()).log().printStackTrace();
+ if (loop.getLastError() != null) {
+ //System.out.println("last error: " + processingChain.getLastError().getMessage());
+ errorHandler.setError(loop.getLastError()).log().printStackTrace();
}
// Reset event processing objects.
- processingChain = null;
+ //System.out.println("setting objects to null...");
+ loop.dispose();
+ loop = null;
processingThread = null;
+ //System.out.println("stopEventProcessing - done!");
}
}
@@ -1527,65 +1553,23 @@
+ "mesg: " + status.getMessage());
}
- public static EtConnection fromConfigurationModel(ConfigurationModel configurationModel) {
- try {
-
- // make a direct connection to ET system's tcp server
- EtSystemOpenConfig etConfig = new EtSystemOpenConfig(
- configurationModel.getEtName(),
- configurationModel.getHost(),
- configurationModel.getPort());
-
- // create ET system object with verbose debugging output
- EtSystem sys = new EtSystem(etConfig, EtConstants.debugInfo);
- sys.open();
-
- // configuration of a new station
- EtStationConfig statConfig = new EtStationConfig();
- //statConfig.setFlowMode(cn.flowMode);
- // FIXME: Flow mode hard-coded.
- statConfig.setFlowMode(EtConstants.stationSerial);
- boolean blocking = configurationModel.getBlocking();
- if (!blocking) {
- statConfig.setBlockMode(EtConstants.stationNonBlocking);
- int qSize = configurationModel.getQueueSize();
- if (qSize > 0) {
- statConfig.setCue(qSize);
- }
- }
- // Set prescale.
- int prescale = configurationModel.getPrescale();
- if (prescale > 0) {
- //System.out.println("setting prescale to " + cn.prescale);
- statConfig.setPrescale(prescale);
- }
-
- // Create the station.
- //System.out.println("position="+config.getInteger("position"));
- EtStation stat = sys.createStation(
- statConfig,
- configurationModel.getStationName(),
- configurationModel.getStationPosition());
-
- // attach to new station
- EtAttachment att = sys.attach(stat);
-
- // Return new connection.
- EtConnection connection = new EtConnection(
- sys,
- att,
- stat,
- configurationModel.getWaitMode(),
- configurationModel.getWaitTime(),
- configurationModel.getChunkSize()
- );
-
- return connection;
-
- } catch (Exception e) {
- e.printStackTrace();
- return null;
- }
+ /**
+ * Create an ET server connection from a <code>ConfigurationModel</code>.
+ * @param config The ConfigurationModel with the connection parameters.
+ * @return The EtConnection object.
+ */
+ private static EtConnection fromConfigurationModel(ConfigurationModel config) {
+ return EtConnection.createConnection(
+ config.getEtName(),
+ config.getHost(),
+ config.getPort(),
+ config.getBlocking(),
+ config.getQueueSize(),
+ config.getPrescale(),
+ config.getStationName(),
+ config.getStationPosition(),
+ config.getWaitMode(),
+ config.getWaitTime(),
+ config.getChunkSize());
}
-
}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/RunPanel.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/RunPanel.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -26,13 +26,18 @@
import org.hps.evio.EventConstants;
import org.hps.monitoring.gui.model.RunModel;
import org.hps.record.composite.CompositeRecord;
-import org.hps.record.composite.CompositeProcessor;
+import org.hps.record.composite.CompositeRecordProcessor;
import org.jlab.coda.jevio.EvioEvent;
/**
* Dashboard for displaying information about the current run.
* @author Jeremy McCormick <[log in to unmask]>
*/
+// TODO: Add current data rate field (measured over last ~second).
+// TODO: Add current event rate field (measured over last ~second).
+// TODO: Add event sequence number from CompositeRecord.
+// TODO: Add average data rate field (over entire session).
+// TODO: Add average proc time per event field (over entire session).
public class RunPanel extends JPanel implements PropertyChangeListener {
FieldPanel runNumberField = new FieldPanel("Run Number", "", 10, false);
@@ -90,7 +95,7 @@
timer.purge();
}
- class RunModelUpdater extends CompositeProcessor {
+ class RunModelUpdater extends CompositeRecordProcessor {
@Override
public void startJob() {
@@ -102,7 +107,9 @@
public void process(CompositeRecord event) {
model.incrementEventsReceived();
EvioEvent evioEvent = event.getEvioEvent();
- if (evioEvent != null) {
+ if (event.getEtEvent() != null && event.getEvioEvent() == null) {
+ model.addDataReceived(event.getEtEvent().getData().length);
+ } else if (evioEvent != null) {
model.addDataReceived((long)evioEvent.getTotalBytes());
model.setEventNumber(evioEvent.getEventNumber());
if (EventConstants.isPreStartEvent(evioEvent)) {
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/model
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/model/AbstractModel.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/model/AbstractModel.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -13,11 +13,16 @@
public abstract class AbstractModel {
protected PropertyChangeSupport propertyChangeSupport;
+ protected boolean listenersEnabled = true;
public AbstractModel() {
propertyChangeSupport = new PropertyChangeSupport(this);
}
+ public void setListenersEnabled(boolean listenersEnabled) {
+ this.listenersEnabled = listenersEnabled;
+ }
+
public void addPropertyChangeListener(PropertyChangeListener listener) {
propertyChangeSupport.addPropertyChangeListener(listener);
}
@@ -27,17 +32,21 @@
}
protected void firePropertyChange(String propertyName, Object oldValue, Object newValue) {
- propertyChangeSupport.firePropertyChange(propertyName, oldValue, newValue);
+ if (listenersEnabled)
+ propertyChangeSupport.firePropertyChange(propertyName, oldValue, newValue);
}
protected void firePropertyChange(PropertyChangeEvent evt) {
- propertyChangeSupport.firePropertyChange(evt);
+ if (listenersEnabled)
+ propertyChangeSupport.firePropertyChange(evt);
}
abstract public String[] getPropertyNames();
// FIXME: This method is kind of a hack. Any other good way to do this?
public void fireAllChanged() {
+ if (!listenersEnabled)
+ return;
for (String property : getPropertyNames()) {
Method getMethod = null;
for (Method method : getClass().getMethods()) {
@@ -47,7 +56,7 @@
}
}
try {
- Object value = getMethod.invoke(this, null);
+ Object value = getMethod.invoke(this, (Object[])null);
if (value != null) {
firePropertyChange(property, value, value);
for (PropertyChangeListener listener : propertyChangeSupport.getPropertyChangeListeners()) {
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/model
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/model/ConfigurationModel.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/gui/model/ConfigurationModel.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -4,7 +4,8 @@
import java.util.logging.Level;
import org.hps.monitoring.enums.SteeringType;
-import org.hps.record.processing.DataSourceType;
+import org.hps.record.enums.DataSourceType;
+import org.hps.record.enums.ProcessingStage;
import org.jlab.coda.et.enums.Mode;
/**
@@ -21,7 +22,7 @@
// Job settings
public static final String AIDA_AUTO_SAVE_PROPERTY = "AidaAutoSave";
- public static final String AIDA_FILE_NAME_PROPERTY = "AidaFileName";
+ public static final String AIDA_FILE_NAME_PROPERTY = "AidaFileName";
public static final String DETECTOR_NAME_PROPERTY = "DetectorName";
public static final String DISCONNECT_ON_ERROR_PROPERTY = "DisconnectOnError";
public static final String DISCONNECT_ON_END_RUN_PROPERTY = "DisconnectOnEndRun";
@@ -36,6 +37,7 @@
// Data source
public static final String DATA_SOURCE_TYPE_PROPERTY = "DataSourceType";
public static final String DATA_SOURCE_PATH_PROPERTY = "DataSourcePath";
+ public static final String PROCESSING_STAGE_PROPERTY = "ProcessingStage";
// ET connection parameters
public static final String ET_NAME_PROPERTY = "EtName";
@@ -69,7 +71,8 @@
// Data source
DATA_SOURCE_TYPE_PROPERTY,
- DATA_SOURCE_PATH_PROPERTY,
+ DATA_SOURCE_PATH_PROPERTY,
+ PROCESSING_STAGE_PROPERTY,
// ET parameters
ET_NAME_PROPERTY,
@@ -244,6 +247,18 @@
firePropertyChange(DATA_SOURCE_PATH_PROPERTY, oldValue, getDataSourcePath());
}
+ public ProcessingStage getProcessingStage() {
+ if (config.get(PROCESSING_STAGE_PROPERTY) == null)
+ throw new RuntimeException(PROCESSING_STAGE_PROPERTY + " is null!!!");
+ return ProcessingStage.valueOf(config.get(PROCESSING_STAGE_PROPERTY));
+ }
+
+ public void setProcessingStage(ProcessingStage processingStage) {
+ ProcessingStage oldValue = getProcessingStage();
+ config.set(PROCESSING_STAGE_PROPERTY, processingStage);
+ firePropertyChange(PROCESSING_STAGE_PROPERTY, oldValue, getProcessingStage());
+ }
+
public String getEtName() {
return config.get(ET_NAME_PROPERTY);
}
@@ -363,7 +378,7 @@
config.set(PRESCALE_PROPERTY, prescale);
firePropertyChange(PRESCALE_PROPERTY, oldValue, getPrescale());
}
-
+
@Override
public String[] getPropertyNames() {
return CONFIG_PROPERTIES;
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/subsys/et
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/subsys/et/EtSystemMonitor.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/subsys/et/EtSystemMonitor.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -7,13 +7,13 @@
import org.hps.monitoring.subsys.Subsystem;
import org.hps.monitoring.subsys.SystemStatus;
import org.hps.monitoring.subsys.SystemStatusImpl;
-import org.hps.record.et.EtProcessor;
+import org.hps.record.et.EtEventProcessor;
import org.jlab.coda.et.EtEvent;
/**
* This is a class for monitoring the ET system.
*/
-public final class EtSystemMonitor extends EtProcessor {
+public final class EtSystemMonitor extends EtEventProcessor {
SystemStatus systemStatus;
int events = 0;
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/subsys/et
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/subsys/et/EtSystemStripCharts.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/java/org/hps/monitoring/subsys/et/EtSystemStripCharts.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -2,14 +2,14 @@
import org.hps.monitoring.plotting.MonitoringPlotFactory;
import org.hps.monitoring.subsys.SystemStatisticsImpl;
-import org.hps.record.et.EtProcessor;
+import org.hps.record.et.EtEventProcessor;
import org.jlab.coda.et.EtEvent;
import org.lcsim.util.aida.AIDA;
/**
* A basic set of strip charts for monitoring the ET system.
*/
-public final class EtSystemStripCharts extends EtProcessor {
+public final class EtSystemStripCharts extends EtEventProcessor {
SystemStatisticsImpl stats = new SystemStatisticsImpl();
MonitoringPlotFactory plotFactory = (MonitoringPlotFactory)
java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/resources/org/hps/monitoring/config
--- java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/resources/org/hps/monitoring/config/default_config.prop 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-app/src/main/resources/org/hps/monitoring/config/default_config.prop 2014-09-16 18:54:38 UTC (rev 1027)
@@ -18,6 +18,7 @@
# event source
DataSourceType=ET_SERVER
DataSourcePath=
+ProcessingStage=LCIO
# ET connection settings
Blocking=false
java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots
--- java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -9,8 +9,10 @@
import java.util.Collections;
import java.util.List;
-import org.hps.util.Resettable;
+import org.hps.conditions.ecal.EcalChannel;
+import org.hps.conditions.ecal.EcalConditions;
import org.lcsim.conditions.ConditionsManager;
+import org.lcsim.detector.converter.compact.EcalCrystal;
import org.lcsim.event.CalorimeterHit;
import org.lcsim.event.EventHeader;
import org.lcsim.geometry.Detector;
@@ -19,12 +21,6 @@
import org.lcsim.util.aida.AIDA;
/*Conditions system imports*/
//import org.hps.conditions.DatabaseConditionsManager;
-import org.hps.conditions.TableConstants;
-import org.hps.conditions.config.DefaultTestSetup;
-import org.hps.conditions.ecal.EcalChannel;
-import org.hps.conditions.ecal.EcalConditions;
-import org.hps.conditions.ecal.EcalChannelConstants;
-import org.lcsim.detector.converter.compact.EcalCrystal;
/**
@@ -38,7 +34,7 @@
*
* */
-public class EcalDaqPlots extends Driver implements Resettable {
+public class EcalDaqPlots extends Driver {
private String subdetectorName = "Ecal";
private String inputCollection = "EcalCalHits";
@@ -177,6 +173,7 @@
}
@Override
+ /*
public void reset() {
if (plotter != null) {
for (IHistogram1D plot : plots) {
@@ -184,6 +181,7 @@
}
}
}
+ */
public void process(EventHeader event) {
if (event.hasCollection(CalorimeterHit.class, inputCollection)) {
java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots
--- java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplay.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalEventDisplay.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -37,7 +37,6 @@
* - If the user clicks on a crystal, the corresponding energy and time distributions (both Histogram1D) are shown in the last panel of the MonitoringApplication,
* as well as a 2D histogram (hit time vs hit energy). Finally, if available, the raw waveshape (in mV) is displayed.
*
- * The single channel plots are created in the <code>EcalHitPlots</code> driver.
* @author Andrea Celentano
* *
*/
@@ -113,22 +112,22 @@
//create the histograms for single channel energy and time distribution.
//these are NOT shown in this plotter, but are used in the event display.
for(int ii = 0; ii < (47*11); ii = ii +1){
- int row=EcalMonitoringUtils.getRowFromHistoID(ii);
- int column=EcalMonitoringUtils.getColumnFromHistoID(ii);
- channelEnergyPlot.add(aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Hit Energy : " + (row) + " "+ (column)+ ": "+ii, 100, 0, maxEch));
- channelTimePlot.add(aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Hit Time : " + (row) + " "+ (column)+ ": "+ii, 100, 0, 400));
- channelTimeVsEnergyPlot.add(aida.histogram2D(detector.getDetectorName() + " : " + inputCollection + " : Hit Time Vs Energy : " + (row) + " "+ (column)+ ": "+ii, 100, 0, 400,100, 0, maxEch));
- channelRawWaveform.add(aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Hit Energy : " + (row) + " "+ (column)+ ": "+ii));
+ int row=ECalUtils.getRowFromHistoID(ii);
+ int column=ECalUtils.getColumnFromHistoID(ii);
+ channelEnergyPlot.add(aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Hit Energy : " + (column) + " "+ (row)+ ": "+ii, 100, 0, maxEch));
+ channelTimePlot.add(aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Hit Time : " + (column) + " "+ (row)+ ": "+ii, 100, 0, 400));
+ channelTimeVsEnergyPlot.add(aida.histogram2D(detector.getDetectorName() + " : " + inputCollection + " : Hit Time Vs Energy : " + (column) + " "+ (row)+ ": "+ii, 100, 0, 400,100, 0, maxEch));
+ channelRawWaveform.add(aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Hit Energy : " + (column) + " "+ (row)+ ": "+ii));
//the above instruction is a terrible hack, just to fill the arrayList with all the elements. They'll be initialized properly during the event readout,
//since we want to account for possibly different raw waveform dimensions!
- //channelRawWaveform.add(aida.cloud1D(detector.getDetectorName() + " : " + inputCollection + " : Raw Waveform : " + (row) + " "+ (column)+ ": "+ii,1000000000));
+ //channelRawWaveform.add(aida.cloud1D(detector.getDetectorName() + " : " + inputCollection + " : Raw Waveform : " + (column) + " "+ (row)+ ": "+ii,1000000000));
isFirstRaw[ii]=true;
windowRaw[ii]=1;
}
id=0;
- iy=EcalMonitoringUtils.getRowFromHistoID(id);
- ix=EcalMonitoringUtils.getColumnFromHistoID(id);
+ iy=ECalUtils.getRowFromHistoID(id);
+ ix=ECalUtils.getColumnFromHistoID(id);
@@ -138,24 +137,39 @@
plotterFactory = aida.analysisFactory().createPlotterFactory("Ecal single channel plots");
- plotter = plotterFactory.create("Single hits");
+ plotter = plotterFactory.create("Single channel");
plotter.setTitle("");
plotter.style().setParameter("hist2DStyle", "colorMap");
plotter.style().dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
plotter.style().dataStyle().fillStyle().setParameter("showZeroHeightBins",Boolean.FALSE.toString());
plotter.style().dataStyle().errorBarStyle().setVisible(false);
plotter.createRegions(2,2);
- plotter.region(0).plot(channelEnergyPlot.get(0));
- plotter.region(1).plot(channelTimePlot.get(0));
- plotter.region(2).plot(channelTimeVsEnergyPlot.get(0));
- plotter.region(3).plot(channelRawWaveform.get(0));
- plotter.region(3).style().yAxisStyle().setLabel("Amplitude (mV)");
- plotter.region(3).style().xAxisStyle().setLabel("Time (ns)");
+
+
+
+ plotter.region(0).plot(channelEnergyPlot.get(id));
+ plotter.region(0).style().xAxisStyle().setLabel("Hit energy (GeV)");
+ plotter.region(0).style().yAxisStyle().setLabel("");
+
+ plotter.region(1).plot(channelTimePlot.get(id));
+ plotter.region(1).style().xAxisStyle().setLabel("Hit Time (ns)");
+ plotter.region(1).style().yAxisStyle().setLabel("");
+
+ plotter.region(2).plot(channelTimeVsEnergyPlot.get(id));
+ plotter.region(2).style().xAxisStyle().setLabel("Hit Time (ns)");
+ plotter.region(2).style().yAxisStyle().setLabel("Hit Energy (GeV)");
+
+
+ plotter.region(3).plot(channelRawWaveform.get(id));
+ plotter.region(3).style().xAxisStyle().setLabel("Hit energy (GeV)");
+ plotter.region(3).style().yAxisStyle().setLabel("");
plotter.region(3).style().dataStyle().fillStyle().setColor("orange");
plotter.region(3).style().dataStyle().markerStyle().setColor("orange");
plotter.region(3).style().dataStyle().errorBarStyle().setVisible(false);
+
+
System.out.println("Create the event viewer");
viewer=new PEventViewer();
viewer.addCrystalListener(this);
@@ -199,7 +213,7 @@
row=hit.getIdentifierFieldValue("iy");
column=hit.getIdentifierFieldValue("ix");
if ((row!=0)&&(column!=0)){
- ii = EcalMonitoringUtils.getHistoIDFromRowColumn(row,column);
+ ii = ECalUtils.getHistoIDFromRowColumn(row,column);
if (hit.getCorrectedEnergy() > 0) { //A.C. > 0 for the 2D plot drawing
channelEnergyPlot.get(ii).fill(hit.getCorrectedEnergy());
channelTimePlot.get(ii).fill(hit.getTime());
@@ -232,12 +246,12 @@
for (RawTrackerHit hit : hits) {
row=hit.getIdentifierFieldValue("iy");
column=hit.getIdentifierFieldValue("ix");
- if ((row!=0)&&(column!=0)){
- ii = EcalMonitoringUtils.getHistoIDFromRowColumn(row,column);
+ if ((row!=0)&&(column!=0)&&(!ECalUtils.isInHole(row,column))){
+ ii = ECalUtils.getHistoIDFromRowColumn(row,column);
if (isFirstRaw[ii]){ //at the very first hit we read for this channel, we need to read the window length and save it
isFirstRaw[ii]=false;
windowRaw[ii]=hit.getADCValues().length;
- channelRawWaveform.set(ii,aida.histogram1D(detector.getDetectorName() + " : " + inputCollectionRaw + " : Raw Waveform : " + (row) + " "+ (column)+ ": "+ii,windowRaw[ii],-0.5*ECalUtils.ecalReadoutPeriod,(-0.5+windowRaw[ii])*ECalUtils.ecalReadoutPeriod));
+ channelRawWaveform.set(ii,aida.histogram1D(detector.getDetectorName() + " : " + inputCollectionRaw + " : Raw Waveform : " + (column) + " "+ (row)+ ": "+ii,windowRaw[ii],-0.5*ECalUtils.ecalReadoutPeriod,(-0.5+windowRaw[ii])*ECalUtils.ecalReadoutPeriod));
}
if (do_update){
channelRawWaveform.get(ii).reset();
@@ -296,27 +310,50 @@
@Override
public void crystalClicked(CrystalEvent e){
+ int itmpx,itmpy;
Point displayPoint,ecalPoint;
displayPoint=e.getCrystalID();
ecalPoint=viewer.toEcalPoint(displayPoint);
- ix=(int) ecalPoint.getX(); //column
- iy=(int) ecalPoint.getY(); //raw
- id=EcalMonitoringUtils.getHistoIDFromRowColumn(iy,ix);
- System.out.println("Crystal event: "+ix+" "+iy+" "+id);
-
+ itmpx=(int) ecalPoint.getX(); //column
+ itmpy=(int) ecalPoint.getY(); //row
+
+ if ((itmpx!=0)&&(itmpy!=0)&&(!ECalUtils.isInHole(itmpy,itmpx))){
+ ix=itmpx;
+ iy=itmpy;
+ id=ECalUtils.getHistoIDFromRowColumn(iy,ix);
+ System.out.println("Crystal event: "+ix+" "+iy+" "+id);
+
+
- plotter.region(0).clear();
- plotter.region(0).plot(channelEnergyPlot.get(id));
-
- plotter.region(1).clear();
- plotter.region(1).plot(channelTimePlot.get(id));
-
+ plotter.region(0).clear();
+ plotter.region(0).plot(channelEnergyPlot.get(id));
+ plotter.region(0).style().xAxisStyle().setLabel("Hit energy (GeV)");
+ plotter.region(0).style().yAxisStyle().setLabel("");
+
+ plotter.region(1).clear();
+ plotter.region(1).plot(channelTimePlot.get(id));
+ plotter.region(1).style().xAxisStyle().setLabel("Hit Time (ns)");
+ plotter.region(1).style().yAxisStyle().setLabel("");
- plotter.region(2).clear();
- plotter.region(2).plot(channelTimeVsEnergyPlot.get(id));
-
- plotter.region(3).clear();
- plotter.region(3).plot(channelRawWaveform.get(id));
+ plotter.region(2).clear();
+ plotter.region(2).plot(channelTimeVsEnergyPlot.get(id));
+ plotter.region(2).style().yAxisStyle().setLabel("Hit Energy (GeV)");
+ plotter.region(2).style().xAxisStyle().setLabel("Hit Time (ns)");
+
+ plotter.region(3).clear();
+ plotter.region(3).plot(channelRawWaveform.get(id));
+ if (!isFirstRaw[id]){
+ plotter.region(3).style().yAxisStyle().setLabel("Signal amplitude (mV)");
+ plotter.region(3).style().xAxisStyle().setLabel("Time (ns)");
+ plotter.region(3).style().dataStyle().fillStyle().setColor("orange");
+ plotter.region(3).style().dataStyle().markerStyle().setColor("orange");
+ plotter.region(3).style().dataStyle().errorBarStyle().setVisible(false);
+ }
+ else{
+ plotter.region(3).style().xAxisStyle().setLabel("Hit energy (GeV)");
+ plotter.region(3).style().yAxisStyle().setLabel("");
+ }
+ }
}
}
java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots
--- java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -9,6 +9,7 @@
import java.util.List;
import org.hps.recon.ecal.HPSEcalCluster;
+import org.hps.recon.ecal.ECalUtils;
import org.hps.util.Redrawable;
import org.hps.util.Resettable;
import org.lcsim.event.CalorimeterHit;
@@ -87,8 +88,8 @@
occupancyPlots = new ArrayList<IHistogram1D>();
for (int ii = 0; ii < (11 * 47); ii++) {
- int row = EcalMonitoringUtils.getRowFromHistoID(ii);
- int column = EcalMonitoringUtils.getColumnFromHistoID(ii);
+ int row = ECalUtils.getRowFromHistoID(ii);
+ int column = ECalUtils.getColumnFromHistoID(ii);
occupancyPlots.add(aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Occupancy : " + (row) + " " + (column) + ": " + ii, 101, 0, 1));
}
@@ -138,7 +139,7 @@
for (CalorimeterHit hit : hits) {
int column = hit.getIdentifierFieldValue("ix");
int row = hit.getIdentifierFieldValue("iy");
- int id = EcalMonitoringUtils.getHistoIDFromRowColumn(row, column);
+ int id = ECalUtils.getHistoIDFromRowColumn(row, column);
hitCountFillPlot.fill(column, row);
chits[id]++;
nhits++;
@@ -188,10 +189,10 @@
clusterCountDrawPlot.add(clusterCountFillPlot);
occupancyDrawPlot.reset();
for (int id = 0; id < (47 * 11); id++) {
- int row = EcalMonitoringUtils.getRowFromHistoID(id);
- int column = EcalMonitoringUtils.getColumnFromHistoID(id);
+ int row = ECalUtils.getRowFromHistoID(id);
+ int column = ECalUtils.getColumnFromHistoID(id);
double mean = occupancyPlots.get(id).mean();
- if ((row != 0) && (column != 0) && (!EcalMonitoringUtils.isInHole(row, column)))
+ if ((row != 0) && (column != 0) && (!ECalUtils.isInHole(row, column)))
occupancyDrawPlot.fill(column, row, mean);
}
}
java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots
--- java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringUtils.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringUtils.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,26 +0,0 @@
-package org.hps.monitoring.ecal.plots;
-
-public class EcalMonitoringUtils{
-
- public static int getRowFromHistoID(int id){
- return (5-(id%11));
- }
-
- public static int getColumnFromHistoID(int id){
- return ((id/11)-23);
- }
-
- public static int getHistoIDFromRowColumn(int row,int column){
- return (-row+5)+11*(column+23);
- }
-
- public static Boolean isInHole(int row,int column){
- Boolean ret;
- ret=false;
- if ((row==1)||(row==-1)){
- if ((column<=-2)&&(column>=-8)) ret=true;
- }
- return ret;
- }
-
-}
java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots
--- java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalWindowPlotsXY.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalWindowPlotsXY.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -19,6 +19,8 @@
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
+import org.hps.recon.ecal.ECalUtils;
+
public class EcalWindowPlotsXY extends Driver implements ActionListener {
private String subdetectorName= "Ecal";
@@ -29,6 +31,7 @@
private Detector detector;
private IDDecoder dec;
private IHistogram1D windowPlot;
+ private IHistogram1D windowPlot1;
private int window = 10;
private JLabel xLabel, yLabel;
private JComboBox xCombo;
@@ -38,6 +41,7 @@
private boolean testX = false;
private boolean testY = false;
private int plotX, plotY;
+ private boolean isFirst = true;
public EcalWindowPlotsXY() {
int count = 0;
@@ -96,31 +100,17 @@
aida = AIDA.defaultInstance();
aida.tree().cd("/");
- plotter = aida.analysisFactory().createPlotterFactory().create("HPS ECAL Window Plots");
+ plotter = aida.analysisFactory().createPlotterFactory("ECAL Window plots").create("HPS ECAL Window Plots");
//plotterFrame = new AIDAFrame();
//plotterFrame.addPlotter(plotter);
//plotterFrame.setVisible(true);
IPlotterStyle pstyle = plotter.style();
pstyle.dataStyle().errorBarStyle().setVisible(false);
- windowPlot = aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Window Mode Data", window, -0.5, window - 0.5);
- plotter.region(0).plot(windowPlot);
-
- xCombo = new JComboBox(xList);
- xCombo.addActionListener(this);
- xLabel = new JLabel("x");
- xLabel.setLabelFor(xCombo);
- //plotterFrame.getControlsPanel().add(xLabel);
- //plotterFrame.getControlsPanel().add(xCombo);
- yCombo = new JComboBox(yList);
- yCombo.addActionListener(this);
- yLabel = new JLabel("y");
- yLabel.setLabelFor(yCombo);
- //plotterFrame.getControlsPanel().add(yLabel);
- //plotterFrame.getControlsPanel().add(yCombo);
- //plotterFrame.pack();
-
- //plotterFrame.show();
+ plotter.createRegions(1,1);
+ windowPlot1 = aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : dummy", 1, -0.5, 1 - 0.5);
+ plotter.region(0).plot(windowPlot1);
+ plotter.show();
}
public void endOfData() {
@@ -137,8 +127,15 @@
int x = dec.getValue("ix");
int y = dec.getValue("iy");
// System.out.println("got hit: x= " + x + ", y= " + y);
- if (hit.getADCValues().length != window) {
- throw new RuntimeException("Hit has unexpected window length " + hit.getADCValues().length + ", not " + window);
+ if (isFirst) {
+ System.out.println("FIRST!!!");
+ isFirst=false;
+ window=hit.getADCValues().length;
+ windowPlot = aida.histogram1D(detector.getDetectorName() + " : " + inputCollection + " : Window Mode Data", window, -0.5, window - 0.5);
+ plotter.region(0).clear();
+ plotter.region(0).plot(windowPlot);
+ plotter.region(0).refresh();
+
}
if (testX && x != plotX) {
continue;
@@ -148,7 +145,7 @@
}
windowPlot.reset();
for (int i = 0; i < window; i++) {
- windowPlot.fill(i, hit.getADCValues()[i]);
+ windowPlot.fill(i, hit.getADCValues()[i]*ECalUtils.adcResolution);
}
}
java/branches/hps-java_HPSJAVA-88/parent
--- java/branches/hps-java_HPSJAVA-88/parent/pom.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/parent/pom.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -68,7 +68,7 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
- <version>3.8.2</version>
+ <version>4.11</version>
</dependency>
</dependencies>
<!-- DO NOT EDIT THESE DEPENDENCY VERSIONS MANUALLY. -->
java/branches/hps-java_HPSJAVA-88/plugin
--- java/branches/hps-java_HPSJAVA-88/plugin/pom.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/plugin/pom.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,23 +1,19 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-
<modelVersion>4.0.0</modelVersion>
<artifactId>hps-plugin</artifactId>
<name>plugin</name>
<description>HPS JAS3 Plugin</description>
-
<parent>
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
<version>3.0.3-SNAPSHOT</version>
</parent>
-
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/plugin/</url>
<connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/plugin/</connection>
<developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/plugin/</developerConnection>
</scm>
-
<build>
<resources>
<resource>
@@ -51,15 +47,22 @@
</property>
</redirectUrls>
<includes>
- <include>org.hps:hps-users</include>
+ <include>org.apache.commons:commons-math3</include>
+ <include>org.hps:hps-analysis</include>
<include>org.hps:hps-conditions</include>
- <include>org.hps:hps-util</include>
<include>org.hps:hps-detector-data</include>
- <include>org.hps:hps-jevio</include>
+ <include>org.hps:hps-ecal-readout-sim</include>
+ <include>org.hps:hps-ecal-recon</include>
<include>org.hps:hps-et</include>
+ <include>org.hps:hps-evio</include>
+ <include>org.hps:hps-recon</include>
+ <include>org.hps:hps-record-util</include>
+ <include>org.hps:hps-tracking</include>
+ <include>org.hps:hps-users</include>
+ <include>org.hps:hps-util</include>
<include>mysql:mysql-connector-java</include>
- <include>org.apache.commons:commons-math3</include>
- <include>org.hps:cmsg</include>
+ <include>org.jlab.coda:jevio</include>
+ <include>org.jlab.coda:et</include>
</includes>
</configuration>
<executions>
@@ -78,7 +81,6 @@
</plugin>
</plugins>
</build>
-
<dependencies>
<dependency>
<groupId>org.hps</groupId>
@@ -88,8 +90,15 @@
<groupId>org.hps</groupId>
<artifactId>hps-users</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-record-util</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-evio</artifactId>
+ </dependency>
</dependencies>
-
<profiles>
<profile>
<id>submit-plugin-descriptor</id>
@@ -117,5 +126,4 @@
</build>
</profile>
</profiles>
-
</project>
java/branches/hps-java_HPSJAVA-88/record-util
--- java/branches/hps-java_HPSJAVA-88/record-util/pom.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/pom.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -13,7 +13,7 @@
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/record-util/</url>
<connection>scm:svn:svn://svn.freehep.org/hps/java/trunk/record-util/</connection>
<developerConnection>scm:svn:svn://svn.freehep.org/hps/java/trunk/record-util/</developerConnection>
- </scm>
+ </scm>
<build>
<plugins>
<plugin>
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -10,44 +10,45 @@
/**
* Implementation of <tt>AbstractRecordSource</tt> using a dynamic queue that
- * can receive events "on the fly" e.g. from an ET ring.
+ * can receive events "on the fly" e.g. from an ET ring. Polling is used in the
+ * {@link #next()} method to get the next record, which might not be immediately
+ * available.
*/
+// TODO: Add max elements argument to limit pile up of unconsumed events.
public abstract class AbstractRecordQueue<RecordType> extends AbstractRecordSource {
// The queue, which is a linked list with blocking behavior.
- BlockingQueue<RecordType> records = new LinkedBlockingQueue<RecordType>();
+ BlockingQueue<RecordType> records;
// The current LCIO events.
RecordType currentRecord;
// The amount of time to wait for an LCIO event from the queue before dying.
- long timeOutMillis = 1000;
+ long timeOutMillis = -1;
/**
* Constructor that takes the timeout time in seconds.
* @param timeoutSeconds the timeout time in seconds
*/
- public AbstractRecordQueue(long timeoutMillis) {
+ public AbstractRecordQueue(long timeoutMillis, int maxSize) {
this.timeOutMillis = timeoutMillis;
+ records = new LinkedBlockingQueue<RecordType>(maxSize);
}
public AbstractRecordQueue() {
+ // Unlimited queue size.
+ records = new LinkedBlockingQueue<RecordType>();
}
/**
- * Set the time wait time before the poll call times out.
- * @param timeoutMillis
- */
- public void setTimeOutMillis(long timeoutMillis) {
- this.timeOutMillis = timeoutMillis;
- }
-
- /**
* Add a record to the queue.
+ * If the queue is full, then drain it first.
* @param event the LCIO event to add
*/
public void addRecord(RecordType record) {
- records.add(record);
+ if (records.remainingCapacity() > 0)
+ records.add(record);
+ // TODO: Maybe automatically drain the queue here if at capacity???
}
@Override
@@ -64,26 +65,6 @@
public boolean supportsNext() {
return true;
}
-
- @Override
- public boolean supportsPrevious() {
- return false;
- }
-
- @Override
- public boolean supportsIndex() {
- return false;
- }
-
- @Override
- public boolean supportsShift() {
- return false;
- }
-
- @Override
- public boolean supportsRewind() {
- return false;
- }
@Override
public boolean hasCurrent() {
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/EndRunException.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/EndRunException.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,12 +1,9 @@
package org.hps.record;
-import java.io.IOException;
-
/**
* An Exception thrown when an end run occurs.
*/
-// TODO: Add run number to this class.
-public class EndRunException extends IOException {
+public class EndRunException extends RuntimeException {
int runNumber;
@@ -15,6 +12,10 @@
this.runNumber = runNumber;
}
+ /**
+ * Get the run number.
+ * @return The run number.
+ */
public int getRunNumber() {
return runNumber;
}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/ErrorState.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/ErrorState.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,36 +0,0 @@
-package org.hps.record;
-
-public class ErrorState {
-
- Exception lastError;
-
- public ErrorState() {
- }
-
- public Throwable getLastError() {
- return lastError;
- }
-
- public void setLastError(Exception lastError) {
- this.lastError = lastError;
- }
-
- public boolean hasError() {
- return lastError != null;
- }
-
- public void rethrow() throws Exception {
- Exception throwMe = lastError;
- clear(); // Clear error state before throwing.
- throw throwMe;
- }
-
- public void clear() {
- lastError = null;
- }
-
- public void print() {
- lastError.printStackTrace();
- }
-
-}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/HasErrorState.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/HasErrorState.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,6 +0,0 @@
-package org.hps.record;
-
-
-public interface HasErrorState {
- ErrorState getErrorState();
-}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/MaxRecordsException.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/MaxRecordsException.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,8 +1,9 @@
package org.hps.record;
/**
- * Exception to be thrown when maximum number of records is reached.
+ * Exception thrown when maximum number of records is reached.
*/
+// FIXME: Use loop(nevents) instead of this for controlling number of records run.
public class MaxRecordsException extends Exception {
int maxRecords;
@@ -12,6 +13,10 @@
this.maxRecords = maxRecords;
}
+ /**
+ * Get the maximum number of records.
+ * @return The maximum number of records.
+ */
public int getMaxRecords() {
return maxRecords;
}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/RecordProcessingException.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/RecordProcessingException.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,6 +1,11 @@
package org.hps.record;
-public class RecordProcessingException extends RuntimeException {
+/**
+ * Generic error type for exceptions that occur during event processing.
+ * It extends <code>RuntimeException</code> so that methods need not
+ * declare a <code>throws</code> clause in their definitions to use it.
+ */
+public class RecordProcessingException extends RuntimeException {
public RecordProcessingException(String message, Throwable x) {
super(message, x);
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/RecordProcessor.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/RecordProcessor.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,7 +1,11 @@
package org.hps.record;
/**
- * This is a generic interface for event processing.
+ * This is a generic interface for event processing which implements
+ * hooks for starting the job, starting a new run, processing individual
+ * records, ending a run and ending a job. This interface should not
+ * be implemented directly. Instead the {@link AbstractRecordProcessor}
+ * should be extended with a specific type declaration.
*
* @param <RecordType> The concrete type of the event record.
*/
@@ -34,4 +38,9 @@
* End of job action.
*/
void endJob();
+
+ /**
+ * Action to be taken when record processing is suspended.
+ */
+ void suspend();
}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeLoop.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeLoop.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,11 +1,26 @@
package org.hps.record.composite;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
import org.freehep.record.loop.DefaultRecordLoop;
import org.freehep.record.source.NoSuchRecordException;
import org.freehep.record.source.RecordSource;
import org.hps.record.EndRunException;
import org.hps.record.MaxRecordsException;
-import org.hps.record.et.EtSource.EtSourceException;
+import org.hps.record.enums.DataSourceType;
+import org.hps.record.enums.ProcessingStage;
+import org.hps.record.et.EtEventProcessor;
+import org.hps.record.et.EtEventSource;
+import org.hps.record.et.EtEventSource.EtSourceException;
+import org.hps.record.evio.EvioEventProcessor;
+import org.hps.record.evio.EvioFileSource;
+import org.lcsim.conditions.ConditionsManager;
+import org.lcsim.util.Driver;
+import org.lcsim.util.loop.LCIOEventSource;
+import org.lcsim.util.loop.LCSimConditionsManagerImplementation;
/**
* Implementation of a composite record loop for processing
@@ -13,48 +28,75 @@
*/
public final class CompositeLoop extends DefaultRecordLoop {
- CompositeSource recordSource = new CompositeSource();
- CompositeLoopAdapter adapter = new CompositeLoopAdapter();
+ CompositeRecordSource recordSource = new CompositeRecordSource();
+ List<CompositeLoopAdapter> adapters = new ArrayList<CompositeLoopAdapter>();
+ boolean paused = false;
boolean stopOnErrors = true;
boolean done = false;
+ CompositeLoopConfiguration config = null;
+
+ // Look in javadoc API and DefaultRecordLoop for what this does.
+ //this._stopOnEOF
+
+ /**
+ * No argument constructor.
+ * The {@link #configure(CompositeLoopConfiguration)} method must be
+ * called on the loop manually.
+ */
public CompositeLoop() {
setRecordSource(recordSource);
- addLoopListener(adapter);
- addRecordListener(adapter);
}
+ /**
+ * Create the loop with the given configuration.
+ * @param config The configuration parameters of the loop.
+ */
+ public CompositeLoop(CompositeLoopConfiguration config) {
+ setRecordSource(recordSource);
+ configure(config);
+ }
+
+ /**
+ * Set to true in order to have this loop stop on all
+ * event processing errors. Certain types of fatal errors
+ * will never be ignored.
+ * @param stopOnErrors True for this loop to stop on errors.
+ */
public void setStopOnErrors(boolean stopOnErrors) {
this.stopOnErrors = stopOnErrors;
}
+
+ /**
+ * Add a {@link CompositeLoopAdapter} which will process
+ * {@link CompositeRecord} objects.
+ * @param adapter The CompositeLoopAdapter object.
+ */
+ public void addAdapter(CompositeLoopAdapter adapter) {
+ addLoopListener(adapter);
+ addRecordListener(adapter);
+ }
/**
* Set the <code>RecordSource</code> which provides <code>CompositeRecord</code> objects.
*/
- public void setRecordSource(RecordSource source) {
+ public final void setRecordSource(RecordSource source) {
if (!source.getRecordClass().isAssignableFrom(CompositeRecord.class)) {
throw new IllegalArgumentException("The RecordSource has the wrong class.");
}
super.setRecordSource(source);
}
-
+
/**
- * Add a <code>CompositeRecordProcessor</code> which will receive <code>CompositeRecord</code>
- * objects.
- * @param processor The <code>CompositeRecordProcessor</code> to add.
- */
- public void addProcessor(CompositeProcessor processor) {
- adapter.addProcessor(processor);
- }
-
- /**
- * Handle errors in the client such as adapters.
+ * Handle errors from the client such as registered adapters.
* If the loop is setup to try and continue on errors,
* only non-fatal record processing exceptions are ignored.
*/
protected void handleClientError(Throwable x) {
-
+
+ x.printStackTrace();
+
// Is the error ignorable?
if (isIgnorable(x)) {
// Ignore the error!
@@ -69,8 +111,13 @@
done = true;
}
+ /**
+ * Handle errors thrown by the <code>RecordSource</code>.
+ */
protected void handleSourceError(Throwable x) {
-
+
+ x.printStackTrace();
+
// Is the error ignorable?
if (isIgnorable(x)) {
// Ignore the error!
@@ -85,6 +132,14 @@
done = true;
}
+ /**
+ * True if an error is ignorable. If <code>stopOnErrors</code>
+ * is true, then this method always returns false. Otherwise,
+ * the error cause determines whether the loop can continue
+ * processing.
+ * @param x The error that occurred.
+ * @return True if the error can be ignored.
+ */
private boolean isIgnorable(Throwable x) {
// Should the loop try to recover from the error if possible?
@@ -121,12 +176,180 @@
}
}
+ /**
+ * True if the loop is done processing. This is
+ * set to <code>true</code> when fatal errors occur.
+ * @return True if the loop is done processing.
+ */
public boolean isDone() {
return done;
}
+ /**
+ * Get the last error that occurred.
+ * @return The last error that occurred.
+ */
public Throwable getLastError() {
- return _exception;
+ return _exception;
}
-}
-
\ No newline at end of file
+
+ /**
+ * Pause the event processing.
+ */
+ public void pause() {
+ execute(Command.PAUSE);
+ paused = true;
+ }
+
+ /**
+ * Resume event processing from pause mode.
+ */
+ public void resume() {
+ paused = false;
+ }
+
+ /**
+ * True if loop is paused.
+ * @return True if the loop is currently paused.
+ */
+ public boolean isPaused() {
+ return paused;
+ }
+
+ /**
+ * Loop over events from the source.
+ * @param number The number of events to process or -1 for unlimited.
+ * @return The number of records that were processed.
+ */
+ public long loop(long number) {
+ if (number < 0L) {
+ execute(Command.GO, true);
+ } else {
+ execute(Command.GO_N, number, true);
+ execute(Command.STOP);
+ }
+ return getSupplied();
+ }
+
+ /**
+ * Configure the loop using a {@link CompositeLoopConfiguration} object.
+ * @param config The CompositeLoopConfiguration object containing the loop configuration parameter values.
+ */
+ public final void configure(CompositeLoopConfiguration config) {
+
+ if (this.config != null)
+ throw new RuntimeException("CompositeLoop has already been configured.");
+
+ this.config = config;
+
+ EtEventAdapter etAdapter = null;
+ EvioEventAdapter evioAdapter = null;
+ LcioEventAdapter lcioAdapter = null;
+ CompositeLoopAdapter compositeAdapter = new CompositeLoopAdapter();
+
+ // Was there no RecordSource provided explicitly?
+ if (config.recordSource == null) {
+ // Using an ET server connection?
+ if (config.sourceType.equals(DataSourceType.ET_SERVER)) {
+ if (config.connection != null)
+ etAdapter = new EtEventAdapter(new EtEventSource(config.connection));
+ else
+ throw new IllegalArgumentException("Configuration is missing a valid ET connection.");
+ // Using an EVIO file?
+ } else if (config.sourceType.equals(DataSourceType.EVIO_FILE)) {
+ if (config.filePath != null) {
+ evioAdapter = new EvioEventAdapter(new EvioFileSource(new File(config.filePath)));
+ } else {
+ throw new IllegalArgumentException("Configuration is missing a file path.");
+ }
+ // Using an LCIO file?
+ } else if (config.sourceType.equals(DataSourceType.LCIO_FILE)) {
+ if (config.filePath != null)
+ try {
+ lcioAdapter = new LcioEventAdapter(new LCIOEventSource(new File(config.filePath)));
+ } catch (IOException e) {
+ throw new RuntimeException("Error configuring LCIOEventSource.", e);
+ }
+ else
+ throw new IllegalArgumentException("Configuration is missing a file path.");
+ }
+ }
+
+ // Configure ET system.
+ if (config.sourceType == DataSourceType.ET_SERVER) {
+ //System.out.println("compositeLoop.addAdapter(etAdapter)");
+ addAdapter(etAdapter);
+ }
+
+ // Configure EVIO processing.
+ if (config.processingStage.ordinal() >= ProcessingStage.EVIO.ordinal()) {
+ if (config.sourceType.ordinal() <= DataSourceType.EVIO_FILE.ordinal()) {
+ if (evioAdapter == null)
+ evioAdapter = new EvioEventAdapter();
+ //System.out.println("compositeLoop.addAdapter(evioAdapter)");
+ addAdapter(evioAdapter);
+ }
+ }
+
+ // Configure LCIO processing.
+ if (config.processingStage.ordinal() >= ProcessingStage.LCIO.ordinal()) {
+ if (lcioAdapter == null)
+ lcioAdapter = new LcioEventAdapter();
+ //System.out.println("compositeLoop.addAdapter(lcioAdapter)");
+ addAdapter(lcioAdapter);
+ if (config.eventBuilder != null) {
+ if (config.detectorName != null) {
+ // Is LCSim ConditionsManager installed yet?
+ if (!ConditionsManager.isSetup())
+ // Setup LCSim conditions system if not already.
+ LCSimConditionsManagerImplementation.register();
+ config.eventBuilder.setDetectorName(config.detectorName);
+ } else {
+ throw new IllegalArgumentException("Missing detectorName in configuration.");
+ }
+ lcioAdapter.setLCSimEventBuilder(config.eventBuilder);
+ } else {
+ throw new IllegalArgumentException("Missing an LCSimEventBuilder in configuration.");
+ }
+ }
+
+ // Set whether to stop on event processing errors.
+ setStopOnErrors(config.stopOnErrors);
+
+ // Set whether to stop on end run EVIO records.
+ if (evioAdapter != null)
+ evioAdapter.setStopOnEndRun(config.stopOnEndRun);
+
+ // Add EtEventProcessors to loop.
+ for (EtEventProcessor processor : config.etProcessors) {
+ etAdapter.addProcessor(processor);
+ }
+
+ // Add EvioEventProcessors to loop.
+ for (EvioEventProcessor processor : config.evioProcessors) {
+ evioAdapter.addProcessor(processor);
+ }
+
+ // Add Drivers to loop.
+ for (Driver driver : config.drivers) {
+ lcioAdapter.addDriver(driver);
+ }
+
+ // Add CompositeLoopAdapter which should execute last.
+ addAdapter(compositeAdapter);
+
+ // Add CompositeRecordProcessors to loop.
+ for (CompositeRecordProcessor processor : config.compositeProcessors) {
+ compositeAdapter.addProcessor(processor);
+ }
+
+ if (config.supplyLcioEvents) {
+ addAdapter(new LcioEventSupplier(config.timeout, config.maxQueueSize));
+ }
+
+ // Max records was set?
+ if (config.maxRecords != -1) {
+ compositeAdapter.addProcessor(new MaxRecordsProcessor(config.maxRecords));
+ }
+ }
+}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeLoopAdapter.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeLoopAdapter.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -11,16 +11,19 @@
/**
* Adapter for listening on the {@link CompositeLoop} for records and loop events.
+ * Classes that should be part of an "event processing chain" implemented by the
+ * {@link CompositeLoop} should extend this API in order to receive {@link CompositeRecord}
+ * objects that can be modified.
*/
-public final class CompositeLoopAdapter extends AbstractLoopListener implements RecordListener {
+public class CompositeLoopAdapter extends AbstractLoopListener implements RecordListener {
- List<CompositeProcessor> processors = new ArrayList<CompositeProcessor>();
+ List<CompositeRecordProcessor> processors = new ArrayList<CompositeRecordProcessor>();
/**
* Add a <tt>CompositeRecordProcessor</tt> that will listen to this loop.
* @param processor The composite record processor to add.
*/
- void addProcessor(CompositeProcessor processor) {
+ public void addProcessor(CompositeRecordProcessor processor) {
processors.add(processor);
}
@@ -30,45 +33,35 @@
*/
public void finish(LoopEvent loopEvent) {
// Call end job hook on all processors.
- for (CompositeProcessor processor : processors) {
+ for (CompositeRecordProcessor processor : processors) {
processor.endJob();
}
}
/**
- * Start event processing which will call {@link CompositeProcessor#startJob()}
+ * Start event processing which will call {@link CompositeRecordProcessor#startJob()}
* on all the registered processors.
* @param loopEvent
*/
public void start(LoopEvent loopEvent) {
- for (CompositeProcessor processor : processors) {
+ for (CompositeRecordProcessor processor : processors) {
processor.startJob();
}
}
-
+
/**
- * Suspend the loop.
- * @param loopEvent
- */
- public void suspend(LoopEvent loopEvent) {
- if (loopEvent.getException() != null) {
- loopEvent.getException().printStackTrace();
- }
- }
-
- /**
* Process one record.
* @param record
*/
@Override
public void recordSupplied(RecordEvent record) {
- for (CompositeProcessor processor : processors) {
+ for (CompositeRecordProcessor processor : processors) {
try {
// Activate the processing step on the CompositeRecord.
processor.process((CompositeRecord) record.getRecord());
} catch (Exception e) {
// Throw the processing error so the loop can perform proper handling of it.
- throw new RecordProcessingException("Error during record processing.", e);
+ throw new RecordProcessingException("Exception occurred during record processing.", e);
}
}
}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeProcessor.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeProcessor.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,38 +0,0 @@
-package org.hps.record.composite;
-
-import org.hps.record.ErrorState;
-import org.hps.record.HasErrorState;
-import org.hps.record.RecordProcessor;
-
-/**
- * An <code>EventProcessor</code> implementation for processing <code>CompositeRecord</code>
- * records.
- */
-public abstract class CompositeProcessor implements RecordProcessor<CompositeRecord>, HasErrorState {
-
- ErrorState errorState = new ErrorState();
-
- public ErrorState getErrorState() {
- return errorState;
- }
-
- @Override
- public void startJob() {
- }
-
- @Override
- public void startRun(CompositeRecord event) {
- }
-
- @Override
- public void process(CompositeRecord event) throws Exception {
- }
-
- @Override
- public void endRun(CompositeRecord event) {
- }
-
- @Override
- public void endJob() {
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeRecord.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeRecord.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -6,7 +6,9 @@
/**
* This class is used to group together corresponding ET, EVIO and LCIO events
- * for use by the {@link CompositeLoop}.
+ * for use by the {@link CompositeLoop}. The loop's <code>RecordListener</code>
+ * objects may alter this record by setting references to event objects
+ * such as an <code>EvioEvent</code>.
*/
public final class CompositeRecord {
@@ -17,42 +19,82 @@
int sequenceNumber = -1;
int eventNumber = -1;
+ /**
+ * Set the sequence number of this record.
+ * @param sequenceNumber The sequence number.
+ */
public void setSequenceNumber(int sequenceNumber) {
this.sequenceNumber = sequenceNumber;
}
+ /**
+ * Set the event number of this record e.g. from EVIO or LCIO.
+ * @param eventNumber The event number of this record.
+ */
public void setEventNumber(int eventNumber) {
this.eventNumber = eventNumber;
}
+ /**
+ * Set a reference to an <code>EtEvent</code>.
+ * @param etEvent The EtEvent.
+ */
public void setEtEvent(EtEvent etEvent) {
this.etEvent = etEvent;
}
+ /**
+ * Set a reference to an <code>EvioEvent</code>.
+ * @param evioEvent The EvioEvent.
+ */
public void setEvioEvent(EvioEvent evioEvent) {
this.evioEvent = evioEvent;
}
+ /**
+ * Set a reference to an org.lcsim LCIO event (EventHeader).
+ * @param lcioEvent The LCIO EventHeader.
+ */
public void setLcioEvent(EventHeader lcioEvent) {
this.lcioEvent = lcioEvent;
}
+ /**
+ * Get the <code>EtEvent</code>.
+ * @return The EtEvent.
+ */
public EtEvent getEtEvent() {
return etEvent;
}
+ /**
+ * Get the <code>EvioEvent</code>.
+ * @return The EvioEvent.
+ */
public EvioEvent getEvioEvent() {
return evioEvent;
}
+ /**
+ * Get the org.lcsim event.
+ * @return The org.lcsim event.
+ */
public EventHeader getLcioEvent() {
return lcioEvent;
}
+ /**
+ * Get the event sequence number.
+ * @return The event sequence number.
+ */
public int getSequenceNumber() {
return sequenceNumber;
}
+ /**
+ * Get the event number.
+ * @return The event number.
+ */
public int getEventNumber() {
return eventNumber;
}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeSource.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/composite/CompositeSource.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,67 +0,0 @@
-package org.hps.record.composite;
-
-import java.io.IOException;
-
-import org.freehep.record.source.AbstractRecordSource;
-import org.freehep.record.source.NoSuchRecordException;
-
-/**
- * A record source providing <code>CompositeRecord</code> objects.
- */
-public final class CompositeSource extends AbstractRecordSource {
-
- CompositeRecord currentRecord;
- int sequenceNumber = 0;
-
- public void next() throws IOException, NoSuchRecordException {
- currentRecord = new CompositeRecord();
- currentRecord.setSequenceNumber(sequenceNumber);
- ++sequenceNumber;
- }
-
- @Override
- public Object getCurrentRecord() throws IOException {
- return currentRecord;
- }
-
- @Override
- public boolean supportsCurrent() {
- return true;
- }
-
- @Override
- public boolean supportsNext() {
- return true;
- }
-
- @Override
- public boolean supportsPrevious() {
- return false;
- }
-
- @Override
- public boolean supportsIndex() {
- return false;
- }
-
- @Override
- public boolean supportsShift() {
- return false;
- }
-
- @Override
- public boolean supportsRewind() {
- return false;
- }
-
- @Override
- public boolean hasCurrent() {
- return currentRecord != null;
- }
-
- @Override
- public boolean hasNext() {
- // FIXME: Not sure about this one.
- return true;
- }
-}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtAdapter.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtAdapter.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,58 +0,0 @@
-package org.hps.record.et;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.freehep.record.loop.AbstractLoopListener;
-import org.freehep.record.loop.LoopEvent;
-import org.freehep.record.loop.RecordEvent;
-import org.freehep.record.loop.RecordListener;
-import org.hps.record.RecordProcessingException;
-import org.jlab.coda.et.EtEvent;
-
-/**
- * Adapter for processing <tt>EtEvent</tt> objects using a loop.
- */
-public final class EtAdapter extends AbstractLoopListener implements RecordListener {
-
- List<EtProcessor> processors = new ArrayList<EtProcessor>();
-
- void addEtEventProcessor(EtProcessor processor) {
- processors.add(processor);
- }
-
- @Override
- public void recordSupplied(RecordEvent recordEvent) {
- Object object = recordEvent.getRecord();
- if (object instanceof EtEvent) {
- EtEvent event = (EtEvent)object;
- processEvent(event);
- }
- }
-
- @Override
- public void suspend(LoopEvent event) {
- if (event.getException() != null)
- throw new RecordProcessingException("ET system error.", event.getException());
- }
-
- @Override
- public void start(LoopEvent event) {
- for (EtProcessor processor : processors) {
- processor.startJob();
- }
- }
-
- @Override
- public void finish(LoopEvent event) {
- for (EtProcessor processor : processors) {
- processor.endJob();
- }
- }
-
- private void processEvent(EtEvent event) {
- for (EtProcessor processor : processors) {
- processor.process(event);
- }
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtConnection.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtConnection.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -3,9 +3,12 @@
import java.io.IOException;
import org.jlab.coda.et.EtAttachment;
+import org.jlab.coda.et.EtConstants;
import org.jlab.coda.et.EtEvent;
import org.jlab.coda.et.EtStation;
+import org.jlab.coda.et.EtStationConfig;
import org.jlab.coda.et.EtSystem;
+import org.jlab.coda.et.EtSystemOpenConfig;
import org.jlab.coda.et.enums.Mode;
import org.jlab.coda.et.enums.Modify;
import org.jlab.coda.et.exception.EtBusyException;
@@ -13,11 +16,15 @@
import org.jlab.coda.et.exception.EtDeadException;
import org.jlab.coda.et.exception.EtEmptyException;
import org.jlab.coda.et.exception.EtException;
+import org.jlab.coda.et.exception.EtExistsException;
import org.jlab.coda.et.exception.EtTimeoutException;
+import org.jlab.coda.et.exception.EtTooManyException;
import org.jlab.coda.et.exception.EtWakeUpException;
/**
- * Create an EtSystem and EtAttachment from ConnectionParameters.
+ * A class for encapsulating the connection information
+ * for an ET client including the EtSystem and EtAttachment
+ * objects.
*/
public final class EtConnection {
@@ -30,14 +37,19 @@
int chunkSize;
/**
- * Class constructor.
+ * A class constructor for internal convenience.
* @param param The connection parameters.
* @param sys The ET system.
* @param att The ET attachment.
* @param stat The ET station.
*/
- public EtConnection(EtSystem sys, EtAttachment att, EtStation stat,
- Mode waitMode, int waitTime, int chunkSize) {
+ private EtConnection(
+ EtSystem sys,
+ EtAttachment att,
+ EtStation stat,
+ Mode waitMode,
+ int waitTime,
+ int chunkSize) {
this.sys = sys;
this.att = att;
this.stat = stat;
@@ -88,10 +100,12 @@
}
/**
- * Read EtEvent objects from the ET ring.
+ * Read EtEvent objects from the ET server.
+ *
* Preserve all specific Exception types in the throws clause so caller
* may implement their own error and state handling.
- * @return
+ *
+ * @return The array of EtEvents.
* @throws IOException
* @throws EtException
* @throws EtDeadException
@@ -110,7 +124,113 @@
waitMode,
Modify.NOTHING,
waitTime,
- chunkSize);
-
+ chunkSize);
}
+
+ /**
+ * Create an EtConnection with full list of configuration parameters.
+ * @param name The name of the ET system e.g. the buffer file on disk.
+ * @param host The name of the network host.
+ * @param port The port of the network host.
+ * @param blocking True for blocking behavior.
+ * @param queueSize The queue size.
+ * @param prescale The event prescale or 0 for none.
+ * @param stationName The name of the ET station.
+ * @param stationPosition The position of the ET station.
+ * @param waitMode The wait mode.
+ * @param waitTime The wait time if using timed wait.
+ * @param chunkSize The number of ET events to return at once.
+ * @return The EtConnection created from the parameters.
+ */
+ public static EtConnection createConnection(
+ String name,
+ String host,
+ int port,
+ boolean blocking,
+ int queueSize,
+ int prescale,
+ String stationName,
+ int stationPosition,
+ Mode waitMode,
+ int waitTime,
+ int chunkSize) {
+ try {
+
+ // make a direct connection to ET system's tcp server
+ EtSystemOpenConfig etConfig = new EtSystemOpenConfig(
+ name,
+ host,
+ port);
+
+ // create ET system object with verbose debugging output
+ EtSystem sys = new EtSystem(etConfig, EtConstants.debugInfo);
+ sys.open();
+
+ // configuration of a new station
+ EtStationConfig stationConfig = new EtStationConfig();
+ //statConfig.setFlowMode(cn.flowMode);
+ // FIXME: Flow mode hard-coded.
+ stationConfig.setFlowMode(EtConstants.stationSerial);
+ if (!blocking) {
+ stationConfig.setBlockMode(EtConstants.stationNonBlocking);
+ if (queueSize > 0) {
+ stationConfig.setCue(queueSize);
+ }
+ }
+ // Set prescale.
+ if (prescale > 0) {
+ //System.out.println("setting prescale to " + cn.prescale);
+ stationConfig.setPrescale(prescale);
+ }
+
+ // Create the station.
+ EtStation stat = sys.createStation(
+ stationConfig,
+ stationName,
+ stationPosition);
+
+ // attach to new station
+ EtAttachment att = sys.attach(stat);
+
+ // Return new connection.
+ EtConnection connection = new EtConnection(
+ sys,
+ att,
+ stat,
+ waitMode,
+ waitTime,
+ chunkSize
+ );
+
+ return connection;
+
+ } catch (IOException |
+ EtException |
+ EtExistsException |
+ EtClosedException |
+ EtDeadException |
+ EtTooManyException e) {
+ throw new RuntimeException("Failed to create ET connection.", e);
+ }
+ }
+
+ /**
+ * Create an EtConnection with a set of default parameters.
+ * @return An EtConnection with default parameters.
+ */
+ public static EtConnection createDefaultConnection() {
+ return createConnection(
+ "ETBuffer",
+ "localhost",
+ 11111,
+ false,
+ 0,
+ 0,
+ "MY_STATION",
+ 1,
+ Mode.TIMED,
+ 5000000,
+ 1);
+ }
+
}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtConnectionParameters.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtConnectionParameters.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,179 +0,0 @@
-package org.hps.record.et;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-
-import org.jlab.coda.et.EtConstants;
-import org.jlab.coda.et.enums.Mode;
-
-/**
- * Connection parameters for ET system consumer.
- */
-public final class EtConnectionParameters {
-
- /**
- * Parameters that are externally settable from within the package.
- */
- String bufferName = "ETBuffer";
- String host = null;
- int port = EtConstants.serverPort;
- boolean blocking = false;
- boolean verbose = false;
- String statName = "MY_STATION";
- int chunk = 1;
- int qSize = 0;
- int position = 1;
- int pposition = 0;
- int flowMode = EtConstants.stationSerial;
- Mode waitMode = Mode.TIMED;
- int waitTime = 10000000; // wait time in microseconds
- int prescale = 1;
-
- public void setBufferName(String etName) {
- this.bufferName = etName;
- }
-
- public void setHost(String host) {
- this.host = host;
- }
-
- public void setPort(int port) {
- this.port = port;
- }
-
- public void setBlocking(boolean blocking) {
- this.blocking = blocking;
- }
-
- public void setVerbose(boolean verbose) {
- this.verbose = verbose;
- }
-
- public void setStationName(String stationName) {
- this.statName = stationName;
- }
-
- public void setChunkSize(int chunk) {
- this.chunk = chunk;
- }
-
- public void setQueueSize(int qSize) {
- this.qSize = qSize;
- }
-
- public void setStationPosition(int position) {
- this.position = position;
- }
-
- public void setStationsParallelPosition(int pposition) {
- this.pposition = pposition;
- }
-
- public void setWaitMode(Mode waitMode) {
- this.waitMode = waitMode;
- }
-
- public void setWaitTime(int waitTime) {
- this.waitTime = waitTime;
- }
-
- public void setPreScale(int prescale) {
- this.prescale = prescale;
- }
-
- public Mode getWaitMode() {
- return waitMode;
- }
-
- public int getWaitTime() {
- return waitTime;
- }
-
- public int getChunkSize() {
- return chunk;
- }
-
- public String getBufferName() {
- return bufferName;
- }
-
- public String getHost() {
- return host;
- }
-
- public int getPort() {
- return port;
- }
-
- public boolean getBlocking() {
- return blocking;
- }
-
- public boolean getVerbose() {
- return verbose;
- }
-
- public String getStationName() {
- return statName;
- }
-
- public int getPrescale() {
- return prescale;
- }
-
- public int getQueueSize() {
- return qSize;
- }
-
- public int getStationPosition() {
- return position;
- }
-
- public int getStationParallelPosition() {
- return pposition;
- }
-
- /**
- * Class constructor.
- */
- public EtConnectionParameters() {
- // Set the default host to this machine.
- try {
- InetAddress addr = InetAddress.getLocalHost();
- host = addr.getHostName();
- } catch (UnknownHostException e) {
- throw new ConnectionParametersException("Unable to assign default host.");
- }
- }
-
- /**
- * This is thrown from the constructor if there a problem setting up the default host.
- */
- public class ConnectionParametersException extends RuntimeException {
- ConnectionParametersException(String msg) {
- super(msg);
- }
- }
-
- /**
- * Convert this class to a readable string (properties format).
- */
- public String toString() {
- StringBuffer buf = new StringBuffer();
- buf.append("bufferName: " + bufferName + '\n');
- buf.append("host: " + host + '\n');
- buf.append("port: " + port + '\n');
- buf.append("blocking: " + blocking + '\n');
- buf.append("verbose: " + verbose + '\n');
- buf.append("statName: " + statName + '\n');
- buf.append("chunk: " + chunk + '\n');
- buf.append("qSize: " + qSize + '\n');
- buf.append("position: " + position + '\n');
- buf.append("pposition: " + pposition + '\n');
- buf.append("flowMode: " + flowMode + '\n');
- buf.append("waitMode: " + waitMode + '\n');
- buf.append("waitTime: " + waitTime + '\n');
- buf.append("prescale: " + prescale + '\n');
- return buf.toString();
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtLoop.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtLoop.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,55 +0,0 @@
-package org.hps.record.et;
-
-import org.freehep.record.loop.DefaultRecordLoop;
-import org.freehep.record.source.RecordSource;
-import org.hps.record.ErrorState;
-import org.hps.record.HasErrorState;
-import org.jlab.coda.et.EtEvent;
-
-/**
- * Record loop implementation for processing <tt>EtEvent</tt> objects.
- */
-public final class EtLoop extends DefaultRecordLoop implements HasErrorState {
-
- EtAdapter adapter = new EtAdapter();
- ErrorState errorState = new ErrorState();
-
- public EtLoop() {
- addLoopListener(adapter);
- addRecordListener(adapter);
- }
-
- /**
- * Add an <code>EtEventProcessor</code> to the loop.
- * @param processor The <code>EtEventProcessor</code> to add.
- */
- public void addEtEventProcessor(EtProcessor processor) {
- adapter.addEtEventProcessor(processor);
- }
-
- /**
- * Set the <code>RecordSource</code> for the loop.
- * @param source The <code>RecordSource</code> for the loop.
- */
- public void setRecordSource(RecordSource source) {
- if (!source.getRecordClass().isAssignableFrom(EtEvent.class)) {
- throw new IllegalArgumentException("The RecordSource has the wrong class.");
- }
- super.setRecordSource(source);
- }
-
- protected void handleClientError(Throwable x) {
- getErrorState().setLastError((Exception) x);
- getErrorState().print();
- }
-
- protected void handleSourceError(Throwable x) {
- getErrorState().setLastError((Exception) x);
- getErrorState().print();
- }
-
- @Override
- public ErrorState getErrorState() {
- return errorState;
- }
-}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtProcessor.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtProcessor.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,42 +0,0 @@
-package org.hps.record.et;
-
-import org.hps.record.RecordProcessor;
-import org.jlab.coda.et.EtEvent;
-
-/**
- * This is the basic abstract class that processors of
- * <tt>EtEvent</tt> objects should implement.
- */
-public abstract class EtProcessor implements RecordProcessor<EtEvent> {
-
- /**
- * Start of ET session.
- */
- @Override
- public void startJob() {
- }
-
- @Override
- public void startRun(EtEvent event) {
-
- }
-
- /**
- * Process one <tt>EtEvent</tt>.
- */
- @Override
- public void process(EtEvent event) {
- }
-
- @Override
- public void endRun(EtEvent event) {
-
- }
-
- /**
- * End of ET session.
- */
- @Override
- public void endJob() {
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtRecordQueue.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtRecordQueue.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,17 +0,0 @@
-package org.hps.record.et;
-
-import org.hps.record.AbstractRecordQueue;
-import org.jlab.coda.et.EtEvent;
-
-/**
- * A dynamic queue for supplying <tt>EtEvent</tt> objects to a loop.
- * This would most likely be run on a separate thread than the
- * loop to avoid undesired blocking behavior.
- */
-public final class EtRecordQueue extends AbstractRecordQueue<EtEvent> {
-
- @Override
- public Class<EtEvent> getRecordClass() {
- return EtEvent.class;
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtSource.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/et/EtSource.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,106 +0,0 @@
-package org.hps.record.et;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Queue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.freehep.record.source.AbstractRecordSource;
-import org.freehep.record.source.NoSuchRecordException;
-import org.jlab.coda.et.EtEvent;
-
-/**
- * Implement a loop record source supplying <tt>EtEvent</tt> objects
- * from an ET ring server connection.
- */
-public final class EtSource extends AbstractRecordSource {
-
- EtConnection connection;
- EtEvent currentRecord;
- Queue<EtEvent> eventQueue = new LinkedBlockingQueue<EtEvent>();
-
- public EtSource(EtConnection connection) {
- this.connection = connection;
- }
-
- @Override
- public Object getCurrentRecord() throws IOException {
- return currentRecord;
- }
-
- @Override
- public boolean supportsCurrent() {
- return true;
- }
-
- @Override
- public boolean supportsNext() {
- return true;
- }
-
- @Override
- public boolean supportsPrevious() {
- return false;
- }
-
- @Override
- public boolean supportsIndex() {
- return false;
- }
-
- @Override
- public boolean supportsShift() {
- return false;
- }
-
- @Override
- public boolean supportsRewind() {
- return false;
- }
-
- @Override
- public boolean hasCurrent() {
- return currentRecord != null;
- }
-
- @Override
- public boolean hasNext() {
- return true;
- }
-
- @Override
- public void next() throws IOException, NoSuchRecordException {
-
- // Fill the queue if there are no events cached.
- if (eventQueue.size() == 0) {
- readEtEvents();
- }
-
- // Poll the queue.
- currentRecord = eventQueue.poll();
-
- if (currentRecord == null) {
- throw new NoSuchRecordException("ET record queue is empty.");
- }
- }
-
- @Override
- public long size() {
- return this.eventQueue.size();
- }
-
- void readEtEvents() throws IOException {
- try {
- EtEvent[] mevs = connection.readEtEvents();
- eventQueue.addAll(Arrays.asList(mevs));
- } catch (Exception e) {
- throw new EtSourceException("Error while reading ET events.", e);
- }
- }
-
- public static class EtSourceException extends IOException {
- public EtSourceException(String message, Exception cause) {
- super(message, cause);
- }
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioAdapter.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioAdapter.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,80 +0,0 @@
-package org.hps.record.evio;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.freehep.record.loop.AbstractLoopListener;
-import org.freehep.record.loop.LoopEvent;
-import org.freehep.record.loop.RecordEvent;
-import org.freehep.record.loop.RecordListener;
-import org.hps.evio.EventConstants;
-import org.hps.record.RecordProcessingException;
-import org.jlab.coda.jevio.EvioEvent;
-
-/**
- * Adapter to process <tt>EvioEvent</tt> objects using a record loop.
- */
-public final class EvioAdapter extends AbstractLoopListener implements RecordListener {
-
- List<EvioProcessor> processors = new ArrayList<EvioProcessor>();
-
- void addEvioEventProcessor(EvioProcessor processor) {
- processors.add(processor);
- }
-
- @Override
- public void recordSupplied(RecordEvent recordEvent) {
- Object object = recordEvent.getRecord();
- if (object instanceof EvioEvent) {
- EvioEvent event = (EvioEvent)object;
- if (EventConstants.isPreStartEvent(event)) {
- // Start of run.
- startRun(event);
- } else if (EventConstants.isEndEvent(event)) {
- // End of run.
- endRun(event);
- } else if (EventConstants.isPhysicsEvent(event)) {
- // Process one physics event.
- processEvent(event);
- }
- }
- }
-
- @Override
- public void start(LoopEvent event) {
- for (EvioProcessor processor : processors) {
- processor.startJob();
- }
- }
-
- @Override
- public void finish(LoopEvent event) {
- for (EvioProcessor processor : processors) {
- processor.endJob();
- }
- }
-
- @Override
- public void suspend(LoopEvent event) {
- if (event.getException() != null)
- throw new RecordProcessingException("EVIO error.", event.getException());
- }
-
- private void processEvent(EvioEvent event) {
- for (EvioProcessor processor : processors) {
- processor.process(event);
- }
- }
-
- private void startRun(EvioEvent event) {
- for (EvioProcessor processor : processors) {
- processor.startRun(event);
- }
- }
-
- private void endRun(EvioEvent event) {
- for (EvioProcessor processor : processors) {
- processor.endRun(event);
- }
- }
-}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioFileProducer.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -19,8 +19,9 @@
import org.jlab.coda.jevio.EvioReader;
/**
+ * A utility class for streaming an EVIO file to an ET server.
*
- * This is copied and modified from the EvioProducer class in the ET 12.0 CODA module.
+ * NOTE: Original version was copied from the CODA group's ET java module.
*/
// TODO: Add option to set number of events in the put array.
public final class EvioFileProducer {
@@ -38,6 +39,9 @@
EvioFileProducer() {
}
+ /**
+ * Print usage statement.
+ */
private static void usage() {
System.out.println("\nUsage: java Producer -f <et name> -e <evio file> [-p <server port>] [-host <host>]"
+ " [-d <delay in millisec>] [-g <group #>]\n\n"
@@ -51,14 +55,26 @@
System.exit(1);
}
+ /**
+ * Copy byte buffer to an <code>EtEvent</code>.
+ * @param event The target EtEvent.
+ */
public void copyToEtEvent(EtEvent event) {
event.getDataBuffer().put(byteBuffer);
}
+ /**
+ * The externally accessible main method.
+ * @param args The command line arguments.
+ */
public static void main(String[] args) {
(new EvioFileProducer()).doMain(args); // call wrapper method
}
+ /**
+ * Wrapper method called in main.
+ * @param args The command line arguments.
+ */
public void doMain(String[] args) {
try {
for (int i = 0; i < args.length; i++) {
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioFileSource.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioFileSource.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -12,9 +12,10 @@
import org.jlab.coda.jevio.EvioReader;
/**
- * A very basic implementation of <tt>AbstractRecordSource</tt> for supplying <tt>EvioEvent</tt>
- * objects to a loop from EVIO files. Unlike the LCIO record source, it has no rewind or
- * indexing capabilities (for now at least).
+ * A basic implementation of an <tt>AbstractRecordSource</tt> for supplying <tt>EvioEvent</tt>
+ * objects to a loop from EVIO files.
+ *
+ * Unlike the LCIO record source, it has no rewind or indexing capabilities.
*/
public final class EvioFileSource extends AbstractRecordSource {
@@ -24,16 +25,28 @@
int fileIndex = 0;
boolean atEnd;
+ /**
+ * Constructor taking a list of EVIO files.
+ * @param files The list of EVIO files.
+ */
public EvioFileSource(List<File> files) {
this.files.addAll(files);
openReader();
}
+ /**
+ * Constructor taking a single EVIO file.
+ * @param file The EVIO file.
+ */
public EvioFileSource(File file) {
this.files.add(file);
openReader();
}
+ /**
+ * Open the EVIO reader on the current file from the list.
+ * @throws RuntimeException if an EvioException or IOException occurs while opening file.
+ */
private void openReader() {
try {
System.out.println("Opening reader for file " + files.get(fileIndex) + " ...");
@@ -44,6 +57,9 @@
}
}
+ /**
+ * Close the current reader.
+ */
private void closeReader() {
try {
reader.close();
@@ -52,15 +68,28 @@
}
}
+ /**
+ * Get the current record which is an <code>EvioEvent</code>.
+ * @return The current record.s
+ */
@Override
public Object getCurrentRecord() throws IOException {
return currentEvent;
}
+ /**
+ * True if there are no more files to open in the list.
+ * @return True if there are no more files in the list.
+ */
boolean endOfFiles() {
return fileIndex > (files.size() - 1);
}
+ /**
+ * Load the next record.
+ * @throws NoSuchRecordException if source is exhausted.
+ * @throws IOException if there is an error creating the next EvioEvent.
+ */
@Override
public void next() throws IOException, NoSuchRecordException {
for (;;) {
@@ -83,42 +112,29 @@
return;
}
}
-
+
+ /**
+ * True because source supports loading next record.
+ * @return True because source supports loading next record.
+ */
@Override
- public boolean supportsCurrent() {
- return true;
- }
-
- @Override
public boolean supportsNext() {
return true;
}
+ /**
+ * True if there is a current record loaded.
+ * @return True if there is a current record loaded.
+ */
@Override
- public boolean supportsPrevious() {
- return false;
- }
-
- @Override
- public boolean supportsIndex() {
- return false;
- }
-
- @Override
- public boolean supportsShift() {
- return false;
- }
-
- @Override
- public boolean supportsRewind() {
- return false;
- }
-
- @Override
public boolean hasCurrent() {
return currentEvent != null;
}
+ /**
+ * True if there are more records to load.
+ * @return True if there are more records to load.
+ */
@Override
public boolean hasNext() {
return !atEnd;
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioLoop.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioLoop.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,63 +0,0 @@
-package org.hps.record.evio;
-
-import java.io.IOException;
-
-import org.freehep.record.loop.DefaultRecordLoop;
-import org.freehep.record.source.RecordSource;
-import org.hps.record.ErrorState;
-import org.hps.record.HasErrorState;
-import org.jlab.coda.jevio.EvioEvent;
-
-/**
- * Implementation of record loop for processing <tt>EvioEvent</tt> objects.
- */
-public final class EvioLoop extends DefaultRecordLoop implements HasErrorState {
-
- EvioAdapter adapter = new EvioAdapter();
- ErrorState errorState = new ErrorState();
-
- public EvioLoop() {
- addLoopListener(adapter);
- addRecordListener(adapter);
- }
-
- public void addEvioEventProcessor(EvioProcessor processor) {
- adapter.addEvioEventProcessor(processor);
- }
-
- @Override
- public void setRecordSource(RecordSource source) {
- if (!source.getRecordClass().isAssignableFrom(EvioEvent.class)) {
- System.err.println("The class " + source.getRecordClass().getCanonicalName() + " is invalid.");
- throw new IllegalArgumentException("The record class is invalid.");
- }
- super.setRecordSource(source);
- }
-
- public long loop(long number) throws IOException {
- if (number < 0L) {
- execute(Command.GO, true);
- } else {
- execute(Command.GO_N, number, true);
- execute(Command.STOP);
- }
- Throwable t = getProgress().getException();
- if (t != null && t instanceof IOException)
- throw (IOException) t;
- return getSupplied();
- }
-
- public ErrorState getErrorState() {
- return errorState;
- }
-
- protected void handleClientError(Throwable x) {
- getErrorState().setLastError((Exception) x);
- getErrorState().print();
- }
-
- protected void handleSourceError(Throwable x) {
- getErrorState().setLastError((Exception) x);
- getErrorState().print();
- }
-}
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioProcessor.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioProcessor.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,31 +0,0 @@
-package org.hps.record.evio;
-
-import org.hps.record.RecordProcessor;
-import org.jlab.coda.jevio.EvioEvent;
-
-/**
- * This is the basic abstract class that processors of
- * <tt>EvioEvent</tt> objects should implement.
- */
-public abstract class EvioProcessor implements RecordProcessor<EvioEvent> {
-
- @Override
- public void startJob() {
- }
-
- @Override
- public void startRun(EvioEvent event) {
- }
-
- @Override
- public void process(EvioEvent event) {
- }
-
- @Override
- public void endRun(EvioEvent event) {
- }
-
- @Override
- public void endJob() {
- }
-}
\ No newline at end of file
java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio
--- java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioRecordQueue.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/record-util/src/main/java/org/hps/record/evio/EvioRecordQueue.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,15 +0,0 @@
-package org.hps.record.evio;
-
-import org.hps.record.AbstractRecordQueue;
-import org.jlab.coda.jevio.EvioEvent;
-
-/**
- * A dynamic queue providing <tt>EvioEvent</tt> objects to a loop.
- */
-public final class EvioRecordQueue extends AbstractRecordQueue<EvioEvent> {
-
- @Override
- public Class<EvioEvent> getRecordClass() {
- return EvioEvent.class;
- }
-}
java/branches/hps-java_HPSJAVA-88/steering-files/src/main/resources/org/hps/steering/monitoring
--- java/branches/hps-java_HPSJAVA-88/steering-files/src/main/resources/org/hps/steering/monitoring/ECalMonitoring.lcsim 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/steering-files/src/main/resources/org/hps/steering/monitoring/ECalMonitoring.lcsim 2014-09-16 18:54:38 UTC (rev 1027)
@@ -11,72 +11,40 @@
<driver name="CalibrationDriver"/>
<driver name="EcalRawConverter"/>
<driver name="EcalClusterer"/>
-<!-- <driver name="EcalPedestalPlots"/>-->
<driver name="EcalMonitoringPlots"/>
<driver name="EcalHitPlots"/>
- <driver name="EcalClusterPlots"/>
- <driver name="EcalEvsX"/>
-<!-- <driver name="TriggerPlots"/>-->
-<!--
- <driver name="EcalEventMonitor"/>
<driver name="EcalWindowPlots"/>
- <driver name="EcalDaqPlots"/>
--->
-<!-- <driver name="RunControlDriver" />-->
- <!--<driver name="AidaSaveDriver"/>-->
</execute>
<drivers>
- <!--<driver name="RunControlDriver" type="org.hps.util.RunControlDriver"/>-->
<driver name="CalibrationDriver" type="org.hps.conditions.deprecated.CalibrationDriver"/>
+
<driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
-<!-- <threshold>150</threshold>-->
<applyBadCrystalMap>false</applyBadCrystalMap>
<use2014Gain>false</use2014Gain>
-<!-- <dropBadFADC>true</dropBadFADC>-->
</driver>
- <driver name="EcalDaqPlots" type="org.hps.monitoring.drivers.ecal.EcalDaqPlots">
- </driver>
- <driver name="EcalMonitoringPlots" type="org.hps.monitoring.drivers.ecal.EcalMonitoringPlots">
+
+ <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterer">
+ <ecalName>Ecal</ecalName>
+ <ecalCollectionName>EcalCalHits</ecalCollectionName>
+ </driver>
+
+ <driver name="EcalMonitoringPlots" type="org.hps.monitoring.ecal.plots.EcalMonitoringPlots">
<inputCollection>EcalCalHits</inputCollection>
<eventRefreshRate>100</eventRefreshRate>
</driver>
- <driver name="EcalEventMonitor" type="org.hps.monitoring.drivers.ecal.EcalEventMonitor">
- <eventRefreshRate>1000</eventRefreshRate>
+
+ <driver name="EcalHitPlots" type="org.hps.monitoring.ecal.plots.EcalHitPlots">
+ <inputCollection>EcalCalHits</inputCollection>
</driver>
- <driver name="EcalWindowPlots" type="org.hps.monitoring.drivers.ecal.EcalWindowPlotsXY">
+
+ <driver name="EcalWindowPlots" type="org.hps.monitoring.ecal.plots.EcalWindowPlotsXY">
<inputCollection>EcalReadoutHits</inputCollection>
- </driver>
- <driver name="EcalPedestalPlots" type="org.hps.monitoring.drivers.ecal.EcalPedestalPlots">
- <inputCollection>EcalCalHits</inputCollection>
- <eventRefreshRate>100</eventRefreshRate>
- </driver>
+ </driver>
+
<driver name="EventMarkerDriver"
type="org.lcsim.job.EventMarkerDriver">
<eventInterval>1</eventInterval>
- </driver>
-<!-- <driver name="EcalClusterer" type="org.hps.recon.ecal.HPSEcalCTPClusterer">-->
- <driver name="EcalClusterer" type="org.hps.recon.ecal.EcalClusterer">
-<!-- <clusterWindow>32.0</clusterWindow>-->
- <ecalName>Ecal</ecalName>
- <ecalCollectionName>EcalCalHits</ecalCollectionName>
- </driver>
- <driver name="EcalHitPlots" type="org.hps.monitoring.drivers.ecal.EcalHitPlots">
- <maxE>2.0</maxE>
- <logScale>true</logScale>
- </driver>
- <driver name="EcalClusterPlots" type="org.hps.monitoring.drivers.ecal.EcalClusterPlots">
- <maxE>2.0</maxE>
- <logScale>false</logScale>
- </driver>
- <driver name="EcalEvsX" type="org.hps.monitoring.drivers.ecal.EcalEvsX">
- <targetZ>674</targetZ>
- <inputCollection>EcalClusters</inputCollection>
- </driver>
- <driver name="TriggerPlots" type="org.hps.monitoring.drivers.ecal.TriggerPlots">
- <clusterEnergyCut>0.500</clusterEnergyCut>
- </driver>
- <driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
- <outputFileName>triggerEPlots</outputFileName>
- </driver>
+ </driver>
+
</drivers>
</lcsim>
java/branches/hps-java_HPSJAVA-88/steering-files/src/main/resources/org/hps/steering/recon
--- java/branches/hps-java_HPSJAVA-88/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineTruthRecon.lcsim 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/steering-files/src/main/resources/org/hps/steering/recon/HPS2014OfflineTruthRecon.lcsim 2014-09-16 18:54:38 UTC (rev 1027)
@@ -7,7 +7,8 @@
xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
<execute>
<driver name="EventMarkerDriver"/>
- <driver name="CalibrationDriver"/>
+ <!-- <driver name="CalibrationDriver"/> -->
+ <driver name="ConditionsDriver" />
<driver name="RawTrackerHitSensorSetup"/>
<driver name="RawTrackerHitFitterDriver" />
<driver name="TrackerHitDriver"/>
@@ -22,9 +23,7 @@
<driver name="CleanupDriver"/>
</execute>
<drivers>
- <driver name="CalibrationDriver" type="org.hps.conditions.deprecated.CalibrationDriver">
- <!-- <runNumber>${runNumber}</runNumber>-->
- </driver>
+ <driver name="ConditionsDriver" type="org.hps.conditions.ConditionsDriver" />
<driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
<eventInterval>1000</eventInterval>
</driver>
java/branches/hps-java_HPSJAVA-88/tracking
--- java/branches/hps-java_HPSJAVA-88/tracking/pom.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/tracking/pom.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -55,6 +55,11 @@
<artifactId>commons-math3</artifactId>
<version>3.2</version>
</dependency>
+ <dependency>
+ <groupId>org.freehep</groupId>
+ <artifactId>freehep-jminuit</artifactId>
+ <version>1.0.2-SNAPSHOT</version>
+ </dependency>
</dependencies>
</project>
java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking
--- java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/DataTrackerHitDriver.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/DataTrackerHitDriver.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -38,6 +38,9 @@
private double clusterSeedThreshold = 4.0;
private double clusterNeighborThreshold = 3.0;
private double clusterThreshold = 4.0;
+ private double meanTime = 24.0;
+ private double timeWindow = 48.0;
+ private double neighborDeltaT = 24.0;
private int clusterMaxSize = 10;
private int clusterCentralStripAveragingThreshold = 4;
// Clustering errors by number of TrackerHits.
@@ -85,6 +88,18 @@
this.clusterThreshold = clusterThreshold;
}
+ public void setMeanTime(double meanTime) {
+ this.meanTime = meanTime;
+ }
+
+ public void setTimeWindow(double timeWindow) {
+ this.timeWindow = timeWindow;
+ }
+
+ public void setNeighborDeltaT(double neighborDeltaT) {
+ this.neighborDeltaT = neighborDeltaT;
+ }
+
public void setClusterMaxSize(int clusterMaxSize) {
this.clusterMaxSize = clusterMaxSize;
}
@@ -154,6 +169,9 @@
stripClusteringAlgo.setSeedThreshold(clusterSeedThreshold);
stripClusteringAlgo.setNeighborThreshold(clusterNeighborThreshold);
stripClusteringAlgo.setClusterThreshold(clusterThreshold);
+ stripClusteringAlgo.setMeanTime(meanTime);
+ stripClusteringAlgo.setTimeWindow(timeWindow);
+ stripClusteringAlgo.setNeighborDeltaT(neighborDeltaT);
// hitMaker=new HPSFittedRawTrackerHitMaker(shaperFit);
// Create the clusterers and set hit-making parameters.
java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking
--- java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/FittedRawTrackerHit.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/FittedRawTrackerHit.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,12 +1,15 @@
package org.hps.recon.tracking;
+import org.lcsim.event.GenericObject;
+import org.lcsim.event.LCRelation;
import org.lcsim.event.RawTrackerHit;
import org.lcsim.event.base.BaseLCRelation;
/**
- *
+ *
* @author meeg
- * @version $Id: HPSFittedRawTrackerHit.java,v 1.3 2013/04/16 22:05:43 phansson Exp $
+ * @version $Id: HPSFittedRawTrackerHit.java,v 1.3 2013/04/16 22:05:43 phansson
+ * Exp $
*/
// TODO: Add class documentation.
public class FittedRawTrackerHit extends BaseLCRelation {
@@ -31,6 +34,22 @@
return getShapeFitParameters().getAmp();
}
+ public static RawTrackerHit getRawTrackerHit(LCRelation rel) {
+ return (RawTrackerHit) rel.getFrom();
+ }
+
+ public static GenericObject getShapeFitParameters(LCRelation rel) {
+ return (GenericObject) rel.getTo();
+ }
+
+ public static double getT0(LCRelation rel) {
+ return ShapeFitParameters.getT0(getShapeFitParameters(rel));
+ }
+
+ public static double getAmp(LCRelation rel) {
+ return ShapeFitParameters.getAmp(getShapeFitParameters(rel));
+ }
+
@Override
public String toString() {
return String.format("HPSFittedRawTrackerHit: hit cell id %d on sensor %s with fit %s\n", this.getRawTrackerHit().getCellID(), getRawTrackerHit().getDetectorElement().getName(), this.getShapeFitParameters().toString());
java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking
--- java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/NearestNeighborRMSClusterer.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/NearestNeighborRMSClusterer.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,6 +1,7 @@
package org.hps.recon.tracking;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
@@ -28,6 +29,7 @@
private double _cluster_threshold;
private double _meanTime = 24;
private double _timeWindow = 48;
+ private double _neighborDeltaT = Double.POSITIVE_INFINITY;
private final double _minChiProb = Gamma.regularizedGammaQ(4, 20);
/**
@@ -47,6 +49,18 @@
_cluster_threshold = cluster_threshold;
}
+ public void setMeanTime(double _meanTime) {
+ this._meanTime = _meanTime;
+ }
+
+ public void setTimeWindow(double _timeWindow) {
+ this._timeWindow = _timeWindow;
+ }
+
+ public void setNeighborDeltaT(double _neighborDeltaT) {
+ this._neighborDeltaT = _neighborDeltaT;
+ }
+
/**
* Instantiate NearestNeighborRMS with default thresholds:
*
@@ -101,8 +115,10 @@
// Create maps that show the channel status and relate the channel number to the raw hit
// and vice versa
int mapsize = 2 * base_hits.size();
- Map<Integer, Boolean> clusterable = new HashMap<Integer, Boolean>(mapsize);
- Map<FittedRawTrackerHit, Integer> hit_to_channel = new HashMap<FittedRawTrackerHit, Integer>(mapsize);
+// Map<Integer, Boolean> clusterable = new HashMap<Integer, Boolean>(mapsize);
+ Set<Integer> clusterableSet = new HashSet<Integer>(mapsize);
+
+// Map<FittedRawTrackerHit, Integer> hit_to_channel = new HashMap<FittedRawTrackerHit, Integer>(mapsize);
Map<Integer, FittedRawTrackerHit> channel_to_hit = new HashMap<Integer, FittedRawTrackerHit>(mapsize);
// Create list of channel numbers to be used as cluster seeds
@@ -119,11 +135,10 @@
IIdentifier id = rth.getIdentifier();
int channel_number = sid_helper.getElectrodeValue(id);
- // Check for duplicate RawTrackerHit
- if (hit_to_channel.containsKey(base_hit)) {
- throw new RuntimeException("Duplicate hit: " + id.toString());
- }
-
+// // Check for duplicate RawTrackerHit
+// if (hit_to_channel.containsKey(base_hit)) {
+// throw new RuntimeException("Duplicate hit: " + id.toString());
+// }
// Check for duplicate RawTrackerHits or channel numbers
if (channel_to_hit.containsKey(channel_number)) {
// throw new RuntimeException("Duplicate channel number: "+channel_number);
@@ -136,15 +151,17 @@
}
// Add this hit to the maps that relate channels and hits
- hit_to_channel.put(base_hit, channel_number);
+// hit_to_channel.put(base_hit, channel_number);
channel_to_hit.put(channel_number, base_hit);
// Get the signal from the readout chip
double signal = base_hit.getAmp();
double noiseRMS = HPSSVTCalibrationConstants.getNoise((SiSensor) rth.getDetectorElement(), channel_number);
- double time = base_hit.getT0();
+
// Mark this hit as available for clustering if it is above the neighbor threshold
- clusterable.put(channel_number, signal / noiseRMS >= _neighbor_threshold);
+ if (signal / noiseRMS >= _neighbor_threshold && passChisqCut(base_hit)) {
+ clusterableSet.add(channel_number);
+ }
// Add this hit to the list of seeds if it is above the seed threshold
if (signal / noiseRMS >= _seed_threshold && passTimingCut(base_hit) && passChisqCut(base_hit)) {
@@ -159,7 +176,10 @@
for (int seed_channel : cluster_seeds) {
// First check if this hit is still available for clustering
- if (!clusterable.get(seed_channel)) {
+// if (!clusterable.get(seed_channel)) {
+// continue;
+// }
+ if (!clusterableSet.contains(seed_channel)) {
continue;
}
@@ -167,13 +187,15 @@
List<FittedRawTrackerHit> cluster = new ArrayList<FittedRawTrackerHit>();
double cluster_signal = 0.;
double cluster_noise_squared = 0.;
+ double cluster_weighted_time = 0.;
// Create a queue to hold channels whose neighbors need to be checked for inclusion
LinkedList<Integer> unchecked = new LinkedList<Integer>();
// Add the seed channel to the unchecked list and mark it as unavailable for clustering
unchecked.addLast(seed_channel);
- clusterable.put(seed_channel, false);
+// clusterable.put(seed_channel, false);
+ clusterableSet.remove(seed_channel);
// Check the neighbors of channels added to the cluster
while (unchecked.size() > 0) {
@@ -181,34 +203,34 @@
// Pull the next channel off the queue and add it's hit to the cluster
int clustered_cell = unchecked.removeFirst();
cluster.add(channel_to_hit.get(clustered_cell));
- cluster_signal += channel_to_hit.get(clustered_cell).getAmp();
- cluster_noise_squared += Math.pow(HPSSVTCalibrationConstants.getNoise((SiSensor) (channel_to_hit.get(clustered_cell)).getRawTrackerHit().getDetectorElement(), clustered_cell), 2);
+ FittedRawTrackerHit hit = channel_to_hit.get(clustered_cell);
+ cluster_signal += hit.getAmp();
+ cluster_noise_squared += Math.pow(HPSSVTCalibrationConstants.getNoise((SiSensor) hit.getRawTrackerHit().getDetectorElement(), clustered_cell), 2);
+ cluster_weighted_time += hit.getT0() * hit.getAmp();
// cluster_noise_squared +=0; //need to get the noise from the calib. const. class
// Get the neigbor channels
// Set<Integer> neighbor_channels =
// electrodes.getNearestNeighborCells(clustered_cell);
- Set<Integer> neighbor_channels = getNearestNeighborCells(clustered_cell);
+ Collection<Integer> neighbor_channels = getNearestNeighborCells(clustered_cell);
// Now loop over the neighbors and see if we can add them to the cluster
for (int channel : neighbor_channels) {
- // Get the status of this channel
- Boolean addhit = clusterable.get(channel);
-
- // If the map entry is null, there is no raw hit for this channel
- if (addhit == null) {
+ // Check if this neighbor channel is still available for clustering
+ if (!clusterableSet.contains(channel)) {
continue;
}
- // Check if this neighbor channel is still available for clustering
- if (!addhit) {
+ FittedRawTrackerHit neighbor_hit = channel_to_hit.get(channel);
+ if (Math.abs(neighbor_hit.getT0() - cluster_weighted_time / cluster_signal) > _neighborDeltaT) {
+// System.out.format("new hit t0 %f, cluster t0 %f\n", neighbor_hit.getT0(), cluster_weighted_time / cluster_signal);
continue;
}
// Add channel to the list of unchecked clustered channels
// and mark it unavailable for clustering
unchecked.addLast(channel);
- clusterable.put(channel, false);
+ clusterableSet.remove(channel);
} // end of loop over neighbor cells
} // end of loop over unchecked cells
@@ -226,33 +248,26 @@
}
private boolean passTimingCut(FittedRawTrackerHit hit) {
-
- boolean pass = false;
double time = hit.getT0();
- if (Math.abs(time - _meanTime) < _timeWindow) {
- pass = true;
- }
-
- return pass;
+ return (Math.abs(time - _meanTime) < _timeWindow);
}
private boolean passChisqCut(FittedRawTrackerHit hit) {
return hit.getShapeFitParameters().getChiProb() > _minChiProb;
}
- public int getNeighborCell(int cell, int ncells_0, int ncells_1) {
- int neighbor_cell = cell + ncells_0;
- if (isValidCell(neighbor_cell)) {
- return neighbor_cell;
- } else {
- return -1;
- }
- }
-
- public Set<Integer> getNearestNeighborCells(int cell) {
- Set<Integer> neighbors = new HashSet<Integer>();
+// public int getNeighborCell(int cell, int ncells_0, int ncells_1) {
+// int neighbor_cell = cell + ncells_0;
+// if (isValidCell(neighbor_cell)) {
+// return neighbor_cell;
+// } else {
+// return -1;
+// }
+// }
+ public Collection<Integer> getNearestNeighborCells(int cell) {
+ Collection<Integer> neighbors = new ArrayList<Integer>(2);
for (int ineigh = -1; ineigh <= 1; ineigh = ineigh + 2) {
- int neighbor_cell = getNeighborCell(cell, ineigh, 0);
+ int neighbor_cell = cell + ineigh;
if (isValidCell(neighbor_cell)) {
neighbors.add(neighbor_cell);
}
java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking
--- java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/RawTrackerHitFitterDriver.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/RawTrackerHitFitterDriver.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -2,6 +2,8 @@
import java.util.ArrayList;
import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.hps.conditions.deprecated.HPSSVTCalibrationConstants;
import org.hps.conditions.deprecated.HPSSVTCalibrationConstants.ChannelConstants;
@@ -10,6 +12,7 @@
import org.lcsim.detector.tracker.silicon.SiSensor;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
+import org.lcsim.geometry.Detector;
import org.lcsim.lcio.LCIOConstants;
import org.lcsim.recon.cat.util.Const;
import org.lcsim.util.Driver;
java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking
--- java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -4,6 +4,9 @@
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.logging.Level;
+import java.util.logging.LogManager;
+import java.util.logging.Logger;
import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.CholeskyDecomposition;
@@ -14,8 +17,6 @@
import org.apache.commons.math3.special.Gamma;
import org.freehep.math.minuit.FCNBase;
import org.freehep.math.minuit.FunctionMinimum;
-//import org.freehep.math.minuit.MinosError;
-//import org.freehep.math.minuit.MnMinos;
import org.freehep.math.minuit.MnSimplex;
import org.freehep.math.minuit.MnUserParameters;
import org.hps.conditions.deprecated.HPSSVTCalibrationConstants.ChannelConstants;
@@ -41,26 +42,21 @@
private int firstFittedPulse;
private int nFittedPulses;
private boolean debug = false;
+ private static final Logger minuitLoggger = Logger.getLogger("org.freehep.math.minuit");
public ShaperLinearFitAlgorithm(int nPulses) {
this.nPulses = nPulses;
amplitudes = new double[nPulses];
amplitudeErrors = new double[nPulses];
- System.setErr(new PrintStream(new OutputStream() {
- public void write(int b) {
- }
- }));
}
+ @Override
public void setDebug(boolean debug) {
this.debug = debug;
if (debug) {
- System.setErr(System.err);
+ minuitLoggger.setLevel(Level.INFO);
} else {
- System.setErr(new PrintStream(new OutputStream() {
- public void write(int b) {
- }
- }));
+ minuitLoggger.setLevel(Level.OFF);
}
}
@@ -97,10 +93,11 @@
FunctionMinimum min = doRecursiveFit(signal);
// if (!min.isValid() && nPulses == 2) {
// System.out.format("bad fit to %d pulses, chisq %f\n", nPulses, min.fval());
-// debug = true;
-// doRecursiveFit(signal);
-// debug = false;
-//
+// if (!debug) {
+// debug = true;
+// doRecursiveFit(signal);
+// debug = false;
+// }
// }
double chisq = evaluateMinimum(min);
java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking
--- java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/ShaperPileupFitAlgorithm.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/tracking/src/main/java/org/hps/recon/tracking/ShaperPileupFitAlgorithm.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -15,6 +15,9 @@
ShaperLinearFitAlgorithm twoPulseFitter = new ShaperLinearFitAlgorithm(2);
private boolean debug = false;
private double refitThreshold = 0.5;
+ private int totalFits = 0;
+ private int refitAttempts = 0;
+ private int refitsAccepted = 0;
public ShaperPileupFitAlgorithm() {
}
@@ -26,18 +29,26 @@
public Collection<ShapeFitParameters> fitShape(RawTrackerHit rth, HPSSVTCalibrationConstants.ChannelConstants constants) {
Collection<ShapeFitParameters> fittedPulses = onePulseFitter.fitShape(rth, constants);
double singlePulseChiProb = fittedPulses.iterator().next().getChiProb();
+ totalFits++;
if (singlePulseChiProb < refitThreshold) {
+ refitAttempts++;
Collection<ShapeFitParameters> doublePulse = twoPulseFitter.fitShape(rth, constants);
double doublePulseChiProb = doublePulse.iterator().next().getChiProb();
if (doublePulseChiProb > singlePulseChiProb) {
+ refitsAccepted++;
fittedPulses = doublePulse;
}
}
+ if (debug && totalFits % 10000 == 0) {
+ System.out.format("%d fits, %d refit attempts, %d refits accepted\n", totalFits, refitAttempts, refitsAccepted);
+ }
return fittedPulses;
}
public void setDebug(boolean debug) {
this.debug = debug;
+ onePulseFitter.setDebug(debug);
+ twoPulseFitter.setDebug(debug);
}
}
java/branches/hps-java_HPSJAVA-88/users
--- java/branches/hps-java_HPSJAVA-88/users/pom.xml 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/users/pom.xml 2014-09-16 18:54:38 UTC (rev 1027)
@@ -19,6 +19,10 @@
<groupId>org.hps</groupId>
<artifactId>hps-analysis</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.hps</groupId>
+ <artifactId>hps-record-util</artifactId>
+ </dependency>
</dependencies>
<build>
<plugins>
java/branches/hps-java_HPSJAVA-88/users/src/main/java/org/hps/users/omoreno
--- java/branches/hps-java_HPSJAVA-88/users/src/main/java/org/hps/users/omoreno/SvtPerformance.java 2014-09-16 18:43:13 UTC (rev 1026)
+++ java/branches/hps-java_HPSJAVA-88/users/src/main/java/org/hps/users/omoreno/SvtPerformance.java 2014-09-16 18:54:38 UTC (rev 1027)
@@ -1,6 +1,5 @@
package org.hps.users.omoreno;
-
import hep.aida.IHistogram1D;
import hep.aida.IPlotter;
import hep.physics.vec.BasicHep3Vector;
@@ -29,7 +28,7 @@
/**
* Driver that looks at the performance of the SVT.
- *
+ *
* @author Omar Moreno <[log in to unmask]>
* @version $Id:$
*/
@@ -38,19 +37,19 @@
private AIDA aida;
private List<IPlotter> plotters = new ArrayList<IPlotter>();
ShaperAnalyticFitAlgorithm shaperFitter = new ShaperAnalyticFitAlgorithm();
-
+
File performanceOutputFile;
BufferedWriter performanceWriter;
- File samplesOutputFile;
- BufferedWriter samplesWriter;
+ File samplesOutputFile;
+ BufferedWriter samplesWriter;
String performanceOutputFileName = "svt_performance.dat";
String samplesOutputFileName = "samples.dat";
int plotterIndex = 0;
- int eventNumber = 0;
- int runNumber = 0;
+ int eventNumber = 0;
+ int runNumber = 0;
double totalTracks = 0;
double totalTwoTrackEvents = 0;
@@ -65,98 +64,97 @@
// Plot Flags
boolean plotClustersPerLayer = false;
boolean plotMIP = false;
- boolean plotSamples = false;
- boolean batchMode = true;
-
- public SvtPerformance(){
+ boolean plotSamples = false;
+ boolean batchMode = true;
+
+ public SvtPerformance() {
}
// --- Setters ---//
// ---------------//
-
/**
* Enable/disble debug mode
- *
+ *
* @param true or false
- *
+ *
*/
- public void setDebug(boolean debug){
+ public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Enable/disable plotting the number of clusters per layer. Only clusters
* from stereo hits associated with a track are used.
- *
+ *
* @param true or false
- *
+ *
*/
- public void setPlotClustersPerLayer(boolean plotClustersPerLayer){
+ public void setPlotClustersPerLayer(boolean plotClustersPerLayer) {
this.plotClustersPerLayer = plotClustersPerLayer;
}
/**
* Enable/disable plotting the cluster charge. Only clusters from stereo
* hits associated with a track are used.
- *
+ *
* @param true or false
- *
+ *
*/
- public void setPlotClusterCharge(boolean plotMIP){
+ public void setPlotClusterCharge(boolean plotMIP) {
this.plotMIP = plotMIP;
}
/**
- * Enable/disable plotting of raw hit samples. Only raw hits from clusters
+ * Enable/disable plotting of raw hit samples. Only raw hits from clusters
* associated with a track are used.
- *
+ *
* @param true or false
- *
+ *
*/
- public void setPlotSamples(boolean plotSamples){
- this.plotSamples = plotSamples;
+ public void setPlotSamples(boolean plotSamples) {
+ this.plotSamples = plotSamples;
}
-
+
/**
- * Enable/disable batch mode. If set to true, plots are not shown.
+ * Enable/disable batch mode. If set to true, plots are not shown.
*
* @param true or false
- *
+ *
*/
- public void setBatchMode(boolean batchMode){
- this.batchMode = batchMode;
+ public void setBatchMode(boolean batchMode) {
+ this.batchMode = batchMode;
}
-
+
/**
* Set the name of the file to which performance data will be written.
- *
+ *
* @param peformanceOutputFileName: Name of the output file
- *
+ *
*/
- public void setPerformanceOutputFileName(String performanceOutputFileName){
+ public void setPerformanceOutputFileName(String performanceOutputFileName) {
this.performanceOutputFileName = performanceOutputFileName;
}
/**
* Set the name of the file to which raw sample data will be written.
- *
+ *
* @param samplesOutputFileName : Name of the output file
- *
+ *
*/
- public void setSamplesOutputFileName(String samplesOutputFileName){
+ public void setSamplesOutputFileName(String samplesOutputFileName) {
this.samplesOutputFileName = samplesOutputFileName;
}
-
+
/**
* Set the run number
- *
- * @param runNumber
- *
+ *
+ * @param runNumber
+ *
*/
- public void setRunNumber(int runNumber){
- this.runNumber = runNumber;
+ public void setRunNumber(int runNumber) {
+ this.runNumber = runNumber;
}
-
+
protected void detectorChanged(Detector detector) {
this.printDebug("Setting up plots");
@@ -164,7 +162,7 @@
// setup AIDA
aida = AIDA.defaultInstance();
aida.tree().cd("/");
-
+
// Get the list of sensors from the detector
List<HpsSiSensor> sensors = detector.getDetectorElement().findDescendants(HpsSiSensor.class);
@@ -174,10 +172,9 @@
// --- Clusters Per Layer ---//
// --------------------------//
+ if (plotClustersPerLayer) {
- if(plotClustersPerLayer){
-
- if(!batchMode){
+ if (!batchMode) {
plotters.add(PlotUtils.setupPlotter("# Clusters Per Layer", 1, 2));
for (HpsSiSensor sensor : sensors) {
if (sensor.isTopLayer()) {
@@ -197,21 +194,23 @@
// --- MIP Plots ---//
// -----------------//
- if(plotMIP){
+ if (plotMIP) {
try {
-
+
performanceOutputFile = new File(performanceOutputFileName);
- if (!performanceOutputFile.exists()) performanceOutputFile.createNewFile();
-
+ if (!performanceOutputFile.exists()) {
+ performanceOutputFile.createNewFile();
+ }
+
performanceWriter = new BufferedWriter(new FileWriter(performanceOutputFile.getAbsoluteFile()));
-
+
} catch (IOException exception) {
exception.printStackTrace();
}
-
- try{
+ try {
+
performanceWriter.write("! run I\n");
performanceWriter.write("! event I\n");
performanceWriter.write("! volume I\n");
@@ -226,13 +225,12 @@
performanceWriter.write("! hit_y D\n");
performanceWriter.write("! trk_chi_squared D\n");
performanceWriter.write("! hit_time D\n");
-
-
- } catch(IOException exception){
+
+ } catch (IOException exception) {
exception.printStackTrace();
}
-
- if(!batchMode){
+
+ if (!batchMode) {
plotters.add(PlotUtils.setupPlotter("Cluster Charge", 5, 4));
for (HpsSiSensor sensor : sensors) {
if (sensor.isTopLayer()) {
@@ -247,24 +245,25 @@
plotterIndex++;
}
}
-
+
//--- Sample Plots ---//
//--------------------//
- if(plotSamples){
+ if (plotSamples) {
try {
-
+
samplesOutputFile = new File(samplesOutputFileName);
- if (!samplesOutputFile.exists()) samplesOutputFile.createNewFile();
-
+ if (!samplesOutputFile.exists()) {
+ samplesOutputFile.createNewFile();
+ }
+
samplesWriter = new BufferedWriter(new FileWriter(samplesOutputFile.getAbsoluteFile()));
-
+
} catch (IOException exception) {
exception.printStackTrace();
}
-
-
- try{
+
+ try {
samplesWriter.write("! run I\n");
samplesWriter.write("! event I\n");
samplesWriter.write("! volume I\n");
@@ -277,119 +276,126 @@
samplesWriter.write("! sample5 I\n");
samplesWriter.write("! sample6 I\n");
samplesWriter.write("! pedestal D\n");
-
-
- } catch(IOException exception){
-
+
+ } catch (IOException exception) {
+
}
-
+
}
-
- if(batchMode) return;
-
+
+ if (batchMode) {
+ return;
+ }
+
// Show the plotters
- for (IPlotter plotter : plotters) plotter.show();
+ for (IPlotter plotter : plotters) {
+ plotter.show();
+ }
}
public void process(EventHeader event) {
- eventNumber++;
-
- if (!event.hasCollection(Track.class, trackCollectionName))
+ eventNumber++;
+
+ if (!event.hasCollection(Track.class, trackCollectionName)) {
return;
+ }
List<Track> tracks = event.get(Track.class, trackCollectionName);
HpsSiSensor sensor = null;
String plotTitle = null;
int channel, bad_channel;
- int maxClusterChannel = 0;
- int hitsPerCluster = 0;
+ int maxClusterChannel = 0;
+ int hitsPerCluster = 0;
ChannelConstants constants = null;
- ShapeFitParameters fit = null;
double clusterAmplitude, maxClusterAmplitude;
double noise = 0;
- double chiSquared = -1;
double trkChiSquared = 0;
- double hitTime = 0;
- double hitX, hitY, pedestal;
- short[] samples;
+ double hitTime = 0;
+ double hitX, hitY, pedestal;
+ short[] samples;
// Loop over all tracks in an event
for (Track track : tracks) {
- trkChiSquared = 0;
- trkChiSquared = track.getChi2();
-
- if((new BasicHep3Vector(track.getTrackStates().get(0).getMomentum())).magnitude() <= .500) continue;
-
- double[] topClusters = new double[10];
+ trkChiSquared = 0;
+ trkChiSquared = track.getChi2();
+
+ if ((new BasicHep3Vector(track.getTrackStates().get(0).getMomentum())).magnitude() <= .500) {
+ continue;
+ }
+
+ double[] topClusters = new double[10];
double[] bottomClusters = new double[10];
// Loop over all stereo hits associated with a track
- hitX = 0; hitY = 0;
+ hitX = 0;
+ hitY = 0;
for (TrackerHit trackerHit : track.getTrackerHits()) {
-
- hitX = trackerHit.getPosition()[1];
- hitY = trackerHit.getPosition()[2];
-
+
+ hitX = trackerHit.getPosition()[1];
+ hitY = trackerHit.getPosition()[2];
+
// Loop over the strip hits used to crate the stereo hit
- hitTime = 0;
+ hitTime = 0;
for (HelicalTrackStrip stripHit : ((HelicalTrackCross) trackerHit).getStrips()) {
-
- hitTime = stripHit.time();
-
+
+ hitTime = stripHit.time();
+
sensor = (HpsSiSensor) ((RawTrackerHit) stripHit.rawhits().get(0)).getDetectorElement();
- if (sensor.isTopLayer())
+ if (sensor.isTopLayer()) {
topClusters[sensor.getLayerNumber() - 1] += 1;
- else
+ } else {
bottomClusters[sensor.getLayerNumber() - 1] += 1;
+ }
maxClusterAmplitude = 0;
clusterAmplitude = 0;
hitsPerCluster = stripHit.rawhits().size();
noise = 0;
bad_channel = 0;
- chiSquared = -1;
+ double chiSquaredProb = -1;
for (Object rh : stripHit.rawhits()) {
RawTrackerHit rawHit = (RawTrackerHit) rh;
channel = rawHit.getIdentifierFieldValue("strip");
// Check if the channel neighbors a channel that has been tagged as bad
- if(HPSSVTCalibrationConstants.isBadChannel(sensor, channel+1)
- || HPSSVTCalibrationConstants.isBadChannel(sensor, channel-1)){
- bad_channel = 1;
+ if (HPSSVTCalibrationConstants.isBadChannel(sensor, channel + 1)
+ || HPSSVTCalibrationConstants.isBadChannel(sensor, channel - 1)) {
+ bad_channel = 1;
}
-
- if(plotSamples){
+
+ if (plotSamples) {
samples = rawHit.getADCValues();
- pedestal = sensor.getPedestal(channel);
-
+ pedestal = sensor.getPedestal(channel, 0);
+
try {
- if(sensor.isTopLayer()){
+ if (sensor.isTopLayer()) {
samplesWriter.write(runNumber + " " + eventNumber + " 0 " + sensor.getLayerNumber() + " ");
} else {
samplesWriter.write(runNumber + " " + eventNumber + " 1 " + sensor.getLayerNumber() + " ");
}
samplesWriter.write(channel + " " + samples[0] + " " + samples[1] + " " + samples[2] + " "
- + samples[3] + " " + samples[4] + " " + samples[5] + " " + pedestal + "\n");
+ + samples[3] + " " + samples[4] + " " + samples[5] + " " + pedestal + "\n");
} catch (IOException exception) {
exception.printStackTrace();
}
}
-
- //constants = HPSSVTCalibrationConstants.getChannelConstants(sensor, channel);
- // fit = shaperFitter.fitShape(rawHit);
- if (fit.getAmp() > maxClusterAmplitude) {
- maxClusterChannel = channel;
- maxClusterAmplitude = fit.getAmp();
+
+ constants = HPSSVTCalibrationConstants.getChannelConstants(sensor, channel);
+ for (ShapeFitParameters fit : shaperFitter.fitShape(rawHit, constants)) {
+ if (fit.getAmp() > maxClusterAmplitude) {
+ maxClusterChannel = channel;
+ maxClusterAmplitude = fit.getAmp();
+ }
+ if (stripHit.rawhits().size() == 1) {
+ chiSquaredProb = fit.getChiProb();
+ }
+ noise += Math.pow(sensor.getNoise(channel, 0), 2);
+ clusterAmplitude += fit.getAmp();
}
- if(stripHit.rawhits().size() == 1){
- // chiSquared = fit.getChiSq();
- }
- noise += Math.pow(sensor.getNoise(channel), 2);
- clusterAmplitude += fit.getAmp();
}
noise = Math.sqrt(noise);
-
+
if (plotMIP) {
try {
if (sensor.isTopLayer()) {
@@ -399,13 +405,13 @@
plotTitle = "Bottom - Layer " + sensor.getLayerNumber() + " - Cluster Charge";
performanceWriter.write(runNumber + " " + eventNumber + " 1 " + sensor.getLayerNumber() + " ");
}
- performanceWriter.write(maxClusterChannel + " " + clusterAmplitude + " " + noise + " " + hitsPerCluster + " "
- + bad_channel + " " + chiSquared + " " + hitX + " " + hitY + " " + trkChiSquared + " "
- + hitTime + "\n");
+ performanceWriter.write(maxClusterChannel + " " + clusterAmplitude + " " + noise + " " + hitsPerCluster + " "
+ + bad_channel + " " + chiSquaredProb + " " + hitX + " " + hitY + " " + trkChiSquared + " "
+ + hitTime + "\n");
} catch (IOException exception) {
- exception.printStackTrace();
+ exception.printStackTrace();
}
- if(!batchMode){
+ if (!batchMode) {
aida.histogram1D(plotTitle).fill(clusterAmplitude);
}
}
@@ -432,8 +438,9 @@
* print debug statements
*/
public void printDebug(String debugStatement) {
- if (!debug)
+ if (!debug) {
return;
+ }
System.out.println(this.getClass().getSimpleName() + ": " + debugStatement);
}
@@ -442,7 +449,7 @@
try {
performanceWriter.close();
- samplesWriter.close();
+ samplesWriter.close();
} catch (IOException exception) {
exception.printStackTrace();
}
SVNspam 0.1