Author: [log in to unmask]
Date: Mon Mar 21 18:04:10 2016
New Revision: 4318
Log:
Merge in trunk changes to dev branch.
Added:
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/dataquality/MuonCandidateMonitoring.java
- copied unchanged from r4317, java/trunk/analysis/src/main/java/org/hps/analysis/dataquality/MuonCandidateMonitoring.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalTimeWalk.java
- copied unchanged from r4317, java/trunk/conditions/src/main/java/org/hps/conditions/ecal/EcalTimeWalk.java
java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/users/holly/PhysicsRun2016_FEEIter_Filter.lcsim
- copied unchanged from r4317, java/trunk/steering-files/src/main/resources/org/hps/steering/users/holly/PhysicsRun2016_FEEIter_Filter.lcsim
java/branches/jeremy-dev2/users/src/main/java/org/hps/users/celentan/LedOnlineDataDumpDriver.java
- copied unchanged from r4317, java/trunk/users/src/main/java/org/hps/users/celentan/LedOnlineDataDumpDriver.java
java/branches/jeremy-dev2/users/src/main/java/org/hps/users/meeg/SVTPhaseOffsetReader.java
- copied unchanged from r4317, java/trunk/users/src/main/java/org/hps/users/meeg/SVTPhaseOffsetReader.java
java/branches/jeremy-dev2/users/src/main/java/org/hps/users/spaul/FindBiasOnRange.java
- copied unchanged from r4317, java/trunk/users/src/main/java/org/hps/users/spaul/FindBiasOnRange.java
Modified:
java/branches/jeremy-dev2/ (props changed)
java/branches/jeremy-dev2/analysis/pom.xml
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java
java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java
java/branches/jeremy-dev2/conditions/ (props changed)
java/branches/jeremy-dev2/conditions/pom.xml
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/package-info.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/AbstractConditionsObjectConverter.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/package-info.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/package-info.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java
java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java
java/branches/jeremy-dev2/crawler/pom.xml
java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java
java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java
java/branches/jeremy-dev2/detector-data/pom.xml
java/branches/jeremy-dev2/detector-model/pom.xml
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java
java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java
java/branches/jeremy-dev2/distribution/ (props changed)
java/branches/jeremy-dev2/distribution/pom.xml
java/branches/jeremy-dev2/ecal-event-display/pom.xml
java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java
java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java
java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java
java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java
java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java
java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java (contents, props changed)
java/branches/jeremy-dev2/ecal-readout-sim/pom.xml
java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java
java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java
java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java
java/branches/jeremy-dev2/ecal-recon/pom.xml
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java
java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java
java/branches/jeremy-dev2/evio/pom.xml
java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java
java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioReader.java
java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioToLcio.java
java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java
java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/SvtEvioReader.java
java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java
java/branches/jeremy-dev2/job/pom.xml
java/branches/jeremy-dev2/job/src/main/java/org/hps/job/DatabaseConditionsManagerSetup.java
java/branches/jeremy-dev2/job/src/main/java/org/hps/job/JobManager.java
java/branches/jeremy-dev2/logging/pom.xml
java/branches/jeremy-dev2/monitoring-app/ (props changed)
java/branches/jeremy-dev2/monitoring-app/pom.xml
java/branches/jeremy-dev2/monitoring-drivers/pom.xml
java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java
java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java
java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java
java/branches/jeremy-dev2/monitoring-util/pom.xml
java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java
java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java
java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java
java/branches/jeremy-dev2/parent/pom.xml
java/branches/jeremy-dev2/plugin/pom.xml
java/branches/jeremy-dev2/pom.xml
java/branches/jeremy-dev2/recon/pom.xml
java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java
java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java
java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java
java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java
java/branches/jeremy-dev2/record-util/pom.xml
java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java
java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java
java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java
java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java
java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java
java/branches/jeremy-dev2/run-database/pom.xml
java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EpicsVariable.java
java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EvioDataBuilder.java
java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunManager.java
java/branches/jeremy-dev2/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java
java/branches/jeremy-dev2/steering-files/pom.xml
java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim
java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim
java/branches/jeremy-dev2/tracking/pom.xml
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/readout/svt/FpgaData.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/WTrack.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/straight/STUtils.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java
java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java
java/branches/jeremy-dev2/users/pom.xml
java/branches/jeremy-dev2/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java
java/branches/jeremy-dev2/util/pom.xml
java/branches/jeremy-dev2/util/src/main/java/org/hps/util/MergeBunches.java
Modified: java/branches/jeremy-dev2/analysis/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/analysis/pom.xml (original)
+++ java/branches/jeremy-dev2/analysis/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/analysis/</url>
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/dataquality/TridentMonitoring.java Mon Mar 21 18:04:10 2016
@@ -74,8 +74,6 @@
}
private final static Logger LOGGER = Logger.getLogger(TridentMonitoring.class.getPackage().getName());
-
-
private final BasicHep3Matrix beamAxisRotation = new BasicHep3Matrix();
// private static final int nCuts = 9;
@@ -314,8 +312,8 @@
this.beamSize[2] = beamSizeY;
}
-
double ebeam;
+
@Override
protected void detectorChanged(Detector detector) {
LOGGER.info("TridendMonitoring::detectorChanged Setting up the plotter");
@@ -349,15 +347,15 @@
// triTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Track time difference", 100, -10, 10);
// triTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Track time vs. track time", 100, -10, 10, 100, -10, 10);
triTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
- triDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Positron - electron momentum", 100, -1., 1.0);
+ triDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Positron - electron momentum", 100, -ebeam, ebeam);
triSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
- triPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- triPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
-
- triMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1);
+ triPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ triPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
+
+ triMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1 * ebeam);
triZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, -v0VzMax, v0VzMax);
triMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex mass", 100, 0, 0.1 * ebeam);
- triZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
+ triZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
// triX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex X", 100, -v0VxMax, v0VxMax);
// triY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Y", 100, -v0VyMax, v0VyMax);
// triZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Trident: Vertex Z", 100, -v0VzMax, v0VzMax);
@@ -374,15 +372,15 @@
triRadTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Track time vs. track time", 100, -10, 10, 100, -10, 10);
triRadTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
- triRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron - electron momentum", 100, -1., 1.0);
+ triRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron - electron momentum", 100, -ebeam, ebeam);
triRadSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
- triRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- triRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
+ triRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ triRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
// triRadMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex mass vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, 0, 0.1);
// triRadZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. vertex momentum", 100, v0PzMin, v0PzMax, 100, -v0VzMax, v0VzMax);
- triRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex mass", 100, 0, 0.11);
- triRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
+ triRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex mass", 100, 0, 0.1 * ebeam);
+ triRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
// triRadX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex X", 100, -v0VxMax, v0VxMax);
// triRadY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Y", 100, -v0VyMax, v0VyMax);
// triRadZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative trident: Vertex Z", 100, -v0VzMax, v0VzMax);
@@ -398,15 +396,15 @@
// vertTrackTimeDiff = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Track time difference", 100, -10, 10);
// vertTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Track time vs. track time", 100, -10, 10, 100, -10, 10);
vertTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
- vertDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Positron - electron momentum", 100, -1., 1.0);
+ vertDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Positron - electron momentum", 100, -ebeam, ebeam);
vertSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
- vertPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- vertPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
-
- vertMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1);
+ vertPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ vertPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
+
+ vertMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1 * ebeam);
vertZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, -v0VzMax, v0VzMax);
- vertMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass", 100, 0, 0.11);
- vertZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
+ vertMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex mass", 100, 0, 0.1 * ebeam);
+ vertZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
// vertX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex X", 100, -v0VxMax, v0VxMax);
vertY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Y", 100, -v0VyMax, v0VyMax);
// vertZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Vertex: Vertex Z", 100, -v0VzMax, v0VzMax);
@@ -423,15 +421,15 @@
vertRadTrackTime2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Track time vs. track time", 100, -10, 10, 100, -10, 10);
vertRadTrackMomentum2D = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron vs. electron momentum", 100, 0, v0PzMax * ebeam, 100, 0, v0PzMax * ebeam);
- vertRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron - electron momentum", 100, -1., 1.0);
+ vertRadDeltaP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron - electron momentum", 100, -ebeam, ebeam);
vertRadSumP = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Positron + electron momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam);
- vertRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Py(e) vs Py(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
- vertRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Px(e) vs Px(p)", 50, -0.04, 0.04, 50, -0.04, 0.04);
-
- vertRadMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1);
+ vertRadPyEleVsPyPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Py(e) vs Py(p)", 50, -v0PyMax * ebeam, v0PyMax * ebeam, 50, -v0PyMax * ebeam, v0PyMax * ebeam);
+ vertRadPxEleVsPxPos = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Px(e) vs Px(p)", 50, -v0PxMax * ebeam, v0PxMax * ebeam, 50, -v0PxMax * ebeam, v0PxMax * ebeam);
+
+ vertRadMassMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, 0, 0.1 * ebeam);
vertRadZVsMomentum = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. vertex momentum", 100, v0PzMin * ebeam, v0PzMax * ebeam, 100, -v0VzMax, v0VzMax);
- vertRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass", 100, 0, 0.11);
- vertRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. mass", 100, 0, 0.11, 100, -v0VzMax, v0VzMax);
+ vertRadMass = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex mass", 100, 0, 0.1 * ebeam);
+ vertRadZVsMass = aida.histogram2D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z vs. mass", 100, 0, 0.1 * ebeam, 100, -v0VzMax, v0VzMax);
vertRadX = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex X", 100, -v0VxMax, v0VxMax);
vertRadY = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Y", 100, -v0VyMax, v0VyMax);
vertRadZ = aida.histogram1D(plotDir + trkType + triggerType + "/" + "Radiative vertex: Vertex Z", 100, -v0VzMax, v0VzMax);
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/FEEClusterPlotter.java Mon Mar 21 18:04:10 2016
@@ -44,6 +44,28 @@
private String outputPlots = null;
+ //Set min energy in histo
+ private double minHistoE = 0.5;
+
+ //Set max energy in histo
+ private double maxHistoE = 1.3;
+
+ /**
+ * Set the minimum histogram energy
+ * @param minHistoE
+ */
+ void setMinHistoE(double minHistoE) {
+ this.minHistoE = minHistoE;
+ }
+
+ /**
+ * Set the maximum histogram energy
+ * @param maxHistoE
+ */
+ void setMaxHistoE(double maxHistoE) {
+ this.maxHistoE = maxHistoE;
+ }
+
@Override
protected void detectorChanged(Detector detector) {
@@ -52,14 +74,72 @@
aida.tree().cd("/");
for (EcalChannel cc : ecalConditions.getChannelCollection()) {
- //aida.histogram1D(getHistoName(cc),200,0.5,1.3);
- aida.histogram1D(getHistoName(cc),200,0.9,2.8);
+ aida.histogram1D(getHistoName(cc),200,minHistoE,maxHistoE);
}
}
private String getHistoName(EcalChannel cc) {
return String.format(histoNameFormat,cc.getChannelId());
+ }
+
+
+ //Set min seed energy value, default to 2015 run
+ private double seedCut = 0.4;
+
+ //set min cluster time in window, default to 2015 run
+ private double minTime = 30;
+
+ //set max cluster time in window, default to 2015 run
+ private double maxTime = 70;
+
+ //set min number of hits in a cluster in row 1, default to 2015 run
+ private int hitCut = 5;
+
+ //hit cut is only used in 2016 data, not 2015
+ boolean useHitCut = false;
+
+
+ /**
+ * Set the cut value for seed energy in GeV
+ * @param seedCut
+ */
+ void setSeedCut(double seedCut) {
+ this.seedCut = seedCut;
+ }
+
+ /**
+ * Set the min time in window to look for cluster
+ * @param minTime
+ */
+ void setMinTime(double minTime) {
+ this.minTime = minTime;
+ }
+
+ /**
+ * Set the max time in window to look for cluster
+ * @param maxTime
+ */
+ void setMaxTime(double maxTime) {
+ this.maxTime = maxTime;
+ }
+
+ /**
+ * Set the hit cut value for hits in cluster
+ * This cut is used in 2016 running (not 2015)
+ * @param hitCut
+ */
+ void setHitCut(int hitCut) {
+ this.hitCut = hitCut;
+ }
+
+ /**
+ * Enable or disable the hit count cut on clusters
+ * This cut is used in 2016 running (not 2015)
+ * @param useHitCut
+ */
+ void setUseHitCut(boolean useHitCut) {
+ this.useHitCut = useHitCut;
}
public void process(EventHeader event) {
@@ -90,12 +170,30 @@
double clusE = clus.getEnergy();
double time = seed.getTime();
- //if ((seedE/clusE > 0.6) && seedE >0.45 && time>30 && time <70){
- if ((seedE/clusE > 0.6) && seedE >0.65 && time>30 && time <70){
+ //in 2015, no hit count cut is used at all
+ if (useHitCut){
+ if (Math.abs(seed.getIdentifierFieldValue("iy"))==1 && (seedE/clusE > 0.6) && seedE >seedCut
+ && time>minTime && time <maxTime && hits.size()>(hitCut+2) ){
- EcalChannel cc = findChannel(seed);
- aida.histogram1D(getHistoName(cc)).fill(clusE);
+ EcalChannel cc = findChannel(seed);
+ aida.histogram1D(getHistoName(cc)).fill(clusE);
+ }
+ else if (Math.abs(seed.getIdentifierFieldValue("iy"))==1 && (seedE/clusE > 0.6) && seedE >seedCut
+ && time>minTime && time <maxTime && hits.size()>(hitCut) ){
+
+ EcalChannel cc = findChannel(seed);
+ aida.histogram1D(getHistoName(cc)).fill(clusE);
+ }
}
+ else {
+ if ((seedE/clusE > 0.6) && seedE >seedCut
+ && time>minTime && time <maxTime ){
+
+ EcalChannel cc = findChannel(seed);
+ aida.histogram1D(getHistoName(cc)).fill(clusE);
+
+ }
+ }
}
}
}
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/DualThresholdSignalFitDriver.java Mon Mar 21 18:04:10 2016
@@ -182,7 +182,7 @@
* Process the event, performing a signal fit for every raw data hit in the input collection.
* The hits that pass the sigma selection cut are added to a new hits collection, which can be
* converted to a CalorimeterHit collection and then clustered.
- * @throw NextEventException if there are not enough hits that pass the selection cut.
+ * @throws NextEventException if there are not enough hits that pass the selection cut.
*/
public void process(EventHeader event) {
if (event.hasCollection(RawTrackerHit.class, inputHitsCollectionName)) {
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeHitSelectionDriver.java Mon Mar 21 18:04:10 2016
@@ -54,7 +54,7 @@
/**
* Set the number of ADC samples in a row which must be above the threshold.
- * @param selectedHits The minimum number of samples above threshold.
+ * @param minimumSelectedSamples The minimum number of samples above threshold.
*/
public void setMinimumSelectedSamples(int minimumSelectedSamples) {
this.minimumSelectedSamples = minimumSelectedSamples;
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/ecal/cosmic/RawModeSignalFitDriver.java Mon Mar 21 18:04:10 2016
@@ -168,7 +168,7 @@
* Process the event, performing a signal fit for every raw data hit in the input collection.
* The hits that pass the sigma selection cut are added to a new hits collection, which can be
* converted to a CalorimeterHit collection and then clustered.
- * @throw NextEventException if there are not enough hits that pass the selection cut.
+ * @throws NextEventException if there are not enough hits that pass the selection cut.
*/
public void process(EventHeader event) {
if (event.hasCollection(RawTrackerHit.class, inputHitsCollectionName)) {
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/DataTriggerSimDriver.java Mon Mar 21 18:04:10 2016
@@ -48,9 +48,9 @@
* output object is not persisted into LCIO after runtime.
*
* @author Kyle McCarty <[log in to unmask]>
- * @see DAQConfigDriver
- * @see EcalOnlineRawConverterDriver
- * @see GTPOnlineClusterDriver
+ * @see org.hps.record.daqconfig.DAQConfigDriver
+ * @see org.hps.recon.ecal.EcalOnlineRawConverterDriver
+ * @see org.hps.recon.ecal.cluster.GTPOnlineClusterDriver
* @see SimTriggerData
*/
public class DataTriggerSimDriver extends Driver {
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/DiagnosticSnapshot.java Mon Mar 21 18:04:10 2016
@@ -95,7 +95,7 @@
* Gets the total number of events where the TI reported a trigger
* of the specified type.
* @param triggerID - The identifier for the type of trigger.
- * @param unique - <code>true</code> returns only the number of
+ * @param hierarchical - <code>true</code> returns only the number of
* events where this trigger type was the <i>only</i> type seen by
* the TI while <code>false</code> returns the number of events
* that saw this trigger type without regards for other trigger
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerDiagStats.java Mon Mar 21 18:04:10 2016
@@ -89,7 +89,7 @@
* Gets the total number of events where the TI reported a trigger
* of the specified type.
* @param triggerID - The identifier for the type of trigger.
- * @param unique - <code>true</code> returns only the number of
+ * @param hierarchical - <code>true</code> returns only the number of
* events where this trigger type was the <i>only</i> type seen by
* the TI while <code>false</code> returns the number of events
* that saw this trigger type without regards for other trigger
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerEvent.java Mon Mar 21 18:04:10 2016
@@ -57,8 +57,6 @@
* was successfully matched to a trigger in the SSP bank.
* @param tiFlags - An array of size 6 indicating which TI bank
* flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
*/
public void matchedReconTrigger(boolean[] tiFlags) {
matchedTriggers(tiFlags, RECON);
@@ -70,8 +68,6 @@
* which cuts did and did not match.
* @param tiFlags - An array of size 6 indicating which TI bank
* flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
* @param matchedCuts - An array of size 3 or 4 indicating which
* cuts did and did not align between the triggers.
*/
@@ -84,8 +80,6 @@
* successfully matched to a trigger in the SSP bank.
* @param tiFlags - An array of size 6 indicating which TI bank
* flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
*/
public void matchedSSPTrigger(boolean[] tiFlags) {
matchedTriggers(tiFlags, SSP);
@@ -97,8 +91,6 @@
* cuts did and did not match.
* @param tiFlags - An array of size 6 indicating which TI bank
* flags are active and which are not.
- * @param triggerTypeID - An identifier indicating the type of
- * trigger that was matched.
* @param matchedCuts - An array of size 3 or 4 indicating which
* cuts did and did not align between the triggers.
*/
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/data/TriggerStatModule.java Mon Mar 21 18:04:10 2016
@@ -135,7 +135,6 @@
* bank flag was active.
* @param tiTypeID - The identifier for the type of TI bank trigger
* that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
* @return Returns the number of triggers as an <code>int</code>.
*/
public int getMatchedReconSimulatedTriggers(int tiTypeID) {
@@ -157,7 +156,6 @@
* bank flag was active.
* @param tiTypeID - The identifier for the type of TI bank trigger
* that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
* @return Returns the number of triggers as an <code>int</code>.
*/
public int getMatchedSSPSimulatedTriggers(int tiTypeID) {
@@ -208,7 +206,6 @@
* flag was active.
* @param tiTypeID - The identifier for the type of TI bank trigger
* that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
* @return Returns the number of triggers as an <code>int</code>.
*/
public int getReconSimulatedTriggers(int tiTypeID) {
@@ -249,7 +246,6 @@
* flag was active.
* @param tiTypeID - The identifier for the type of TI bank trigger
* that should be active.
- * @param triggerTypeID - The identifier for the type of trigger.
* @return Returns the number of triggers as an <code>int</code>.
*/
public int getSSPSimulatedTriggers(int tiTypeID) {
Modified: java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java
=============================================================================
--- java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java (original)
+++ java/branches/jeremy-dev2/analysis/src/main/java/org/hps/analysis/trigger/util/Pair.java Mon Mar 21 18:04:10 2016
@@ -13,8 +13,8 @@
/**
* Creates a pair of the two indicated objects.
- * @param firstObject - The first object.
- * @param secondObject - The second object.
+ * @param firstElement - The first object.
+ * @param secondElement - The second object.
*/
public Pair(E firstElement, F secondElement) {
this.firstObject = firstElement;
Modified: java/branches/jeremy-dev2/conditions/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/conditions/pom.xml (original)
+++ java/branches/jeremy-dev2/conditions/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/conditions/</url>
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ConditionsDriver.java Mon Mar 21 18:04:10 2016
@@ -28,8 +28,8 @@
* This is a "special" Driver which must have its initialization occur at the right time. It has a custom initialization
* method {@link #initialize()} which should be called after all Driver setup has occurred, but before the job actually
* begins. This is so the conditions system functions properly, including the activation of registered listeners. The
- * setup is performed by in the class {@link org.hps.job.JobManager}, which is used in the default command line front
- * end of the hps-distribution. If that class is not being used, then the method must be executed manually at the right
+ * setup is performed by the <code>JobManager</code>, which is used in the default command line front end of the
+ * hps-distribution. If that class is not being used, then the method must be executed manually at the right
* time to achieve the proper behavior.
*
* @author Jeremy McCormick, SLAC
@@ -145,4 +145,12 @@
public final void setXmlConfigResource(final String xmlConfigResource) {
this.xmlConfigResource = xmlConfigResource;
}
+
+ public int getRunNumber() {
+ return this.runNumber;
+ }
+
+ public String getDetectorName() {
+ return this.detectorName;
+ }
}
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObject.java Mon Mar 21 18:04:10 2016
@@ -206,8 +206,8 @@
* Get a field value.
*
* @param name the field name
- * @param T the field value
- * @param <T> the implicit return return
+ * @param <T> the implicit return type
+ * @return the value of field cast to given type
*/
@Override
public <T> T getFieldValue(final String name) {
@@ -247,7 +247,7 @@
/**
* Return <code>true</code> if collection ID is valid.
*
- * @param <code>true</code> if collection ID is valid
+ * @return <code>true</code> if collection ID is valid
*/
@Override
public boolean hasValidCollectionId() {
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/BaseConditionsObjectCollection.java Mon Mar 21 18:04:10 2016
@@ -609,7 +609,7 @@
/**
* Set the table meta data of the collection.
*
- * @param the table meta data of the collection
+ * @param tableMetaData the table meta data of the collection
* @see TableMetaData
*/
@Override
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectCollection.java Mon Mar 21 18:04:10 2016
@@ -64,7 +64,6 @@
* Load collection from a CSV file.
*
* @param file the input CSV file
- * @param delimiter the field delimiter (leave blank for default which is comma-delimited)
* @throws IOException if there is an error closing the reader
* @throws FileNotFoundException if the input file does not exist
* @throws ConditionsObjectException if there is an error creating a conditions object
@@ -104,7 +103,6 @@
* Write the collection contents to a text file.
*
* @param file the output text file
- * @param delimiter the field delimiter (leave blank for default which is comma-delimited)
*/
void writeCsv(File file) throws IOException;
}
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/ConditionsObjectException.java Mon Mar 21 18:04:10 2016
@@ -16,7 +16,7 @@
/**
* Error with a message.
*
- * @param message the error message
+ * @param e the original exception
*/
public ConditionsObjectException(Exception e) {
super(e);
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/package-info.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/package-info.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/api/package-info.java Mon Mar 21 18:04:10 2016
@@ -1,11 +1,13 @@
/**
- * User interface to the database conditions system
+ * User API to the database conditions system
*
* @author Jeremy McCormick, SLAC
- * @see ConditionsObject
- * @see ConditionsObjectCollection
- * @see ConditionsSeries
- * @see ConditionsRecord
+ *
+ * @see org.hps.conditions.api.ConditionsObject
+ * @see org.hps.conditions.api.ConditionsObjectCollection
+ * @see org.hps.conditions.api.ConditionsSeries
+ * @see org.hps.conditions.api.ConditionsRecord
+ * @see org.hps.conditions.api.ConditionsTag
*/
package org.hps.conditions.api;
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/AbstractConditionsObjectConverter.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/AbstractConditionsObjectConverter.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/AbstractConditionsObjectConverter.java Mon Mar 21 18:04:10 2016
@@ -14,12 +14,12 @@
import org.lcsim.conditions.ConditionsManager;
/**
- * Implementation of default conversion from database tables to a {@link ConditionsObject} class.
- * <p>
- * This class actually returns collections and not individual objects.
+ * Implementation of default conversion from database tables records to a
+ * {@link org.hps.conditions.api.ConditionsObjectCollection}.
*
* @author Jeremy McCormick, SLAC
- * @param <T> The type of the returned data which should be a class extending {@link BaseConditionsObjectCollection}.
+ * @param <T> The type of the returned data which should be a class extending
+ * {@link org.hps.conditions.api.BaseConditionsObjectCollection}.
*/
public abstract class AbstractConditionsObjectConverter<T> implements ConditionsConverter<T> {
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/ConditionsTagConverter.java Mon Mar 21 18:04:10 2016
@@ -23,14 +23,13 @@
private static final String SELECT_SQL = "SELECT conditions_id, tag from conditions_tags where tag = ?";
/**
- * Get a {@link org.hps.conditions.api.ConditionsTagCollection} which specifies a group of collections
- * that are tagged in the <i>conditions_tags</i> table in the database.
- * <p>
- * The run number is not used, and the <code>name</code> argument specifies the tag name.
+ * Get a {@link org.hps.conditions.api.ConditionsTag.ConditionsTagCollection} which specifies a group of
+ * collections that are tagged in the <i>conditions_tags</i> table in the database. The <code>name</code>
+ * argument is the tag name.
*
- * @param manager The current conditions manager.
- * @param name The name of the conditions set.
- * @return The matching ConditionsRecords.
+ * @param manager the current conditions manager
+ * @param name the name of the conditions set
+ * @return the matching <code>ConditionsRecord</code> objects
*/
@Override
public ConditionsTagCollection getData(final ConditionsManager manager, final String name) {
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/DatabaseConditionsManager.java Mon Mar 21 18:04:10 2016
@@ -337,7 +337,7 @@
}
/**
- * Clear the tags used to filter the {@link org.hps.conditons.api.ConditionsRecord}s.
+ * Clear the tags used to filter the {@link org.hps.conditions.api.ConditionsRecord}s.
*/
public void clearTags() {
this.tags.clear();
@@ -504,9 +504,8 @@
/**
* Add a row for a new collection and return the new collection ID assigned to it.
- *
- * @param tableName the name of the table
- * @param comment an optional comment about this new collection
+ * @param collection the conditions object collection
+ * @param description text description for the new collection ID record in the database
* @return the collection's ID
* @throws SQLException
*/
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/package-info.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/package-info.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/database/package-info.java Mon Mar 21 18:04:10 2016
@@ -1,5 +1,10 @@
/**
- * Implementation of database API to detector conditions
+ * Implementation of database API for detector conditions
+ * <p>
+ * The {@link org.hps.conditions.database.DatabaseConditionsManager} has a set of converters for handling the
+ * conversion of conditions table data to typed collections. The converters are created automatically using
+ * introspection of {@link org.hps.conditions.api.ConditionsObject} classes that have the
+ * {@link org.hps.conditions.database.Table} and {@link org.hps.conditions.database.Field} annotations.
*
* @author Jeremy McCormick, SLAC
*/
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditions.java Mon Mar 21 18:04:10 2016
@@ -11,7 +11,7 @@
* settings, per crystal.
* <p>
* Unlike most conditions data types, it does not extend {@link org.hps.conditions.api.ConditionsObject}, because it is
- * a composite object containing data assembled from many other {@link org.hps.conditions.ConditionsObjects} and has a
+ * a composite object containing data assembled from many other {@link org.hps.conditions.api.ConditionsObject} and has a
* special data converter {@link EcalConditionsConverter}.
*
* @author Jeremy McCormick, SLAC
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/EcalConditionsConverter.java Mon Mar 21 18:04:10 2016
@@ -22,6 +22,7 @@
*
* @author Jeremy McCormick, SLAC
* @author Omar Moreno, UCSC
+ *
* @see EcalConditions
* @see EcalChannel
* @see EcalGain
@@ -38,8 +39,7 @@
/**
* Create combined ECAL conditions object containing all data for the current run.
- *
- * @param manager the conditions manager
+ *
* @param name the conditions set name (unused but must satisfy conditions API)
*/
@Override
@@ -129,7 +129,6 @@
/**
* Get the default collections of {@link EcalBadChannel} objects.
*
- * @param manager the conditions manager
* @return the collections of ECAL bad channel objects
*/
protected ConditionsSeries<EcalBadChannel, EcalBadChannelCollection> getEcalBadChannelSeries() {
@@ -139,7 +138,6 @@
/**
* Get the default {@link EcalCalibration} collection.
*
- * @param manager the conditions manager
* @return the collection of ECAL channel calibration objects
*/
protected EcalCalibrationCollection getEcalCalibrationCollection() {
@@ -149,7 +147,6 @@
/**
* Get the default {@link EcalChannel} collection.
*
- * @param manager the conditions manager
* @return the default ECAL channel object collection
*/
protected EcalChannelCollection getEcalChannelCollection() {
@@ -159,7 +156,6 @@
/**
* Get the default {@link EcalGain} collection.
*
- * @param manager the conditions manager
* @return the ECAL channel gain collection
*/
protected EcalGainCollection getEcalGainCollection() {
@@ -169,7 +165,6 @@
/**
* Get the default {@link EcalTimeShift} collection.
*
- * @param manager the conditions manager
* @return the collection of ECAL time shift objects
*/
protected EcalTimeShiftCollection getEcalTimeShiftCollection() {
@@ -177,9 +172,8 @@
}
/**
- * Get the default {@link EcalPulseWith} collection.
- *
- * @param manager the conditions manager
+ * Get the default {@link EcalPulseWidth} collection.
+ *
* @return the collection of ECAL pulse widths
*/
protected EcalPulseWidthCollection getEcalPulseWidthCollection() {
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/ecal/TestRunEcalConditionsConverter.java Mon Mar 21 18:04:10 2016
@@ -20,7 +20,6 @@
/**
* Get the collections of {@link EcalBadChannel} objects for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run bad channel collections
*/
@Override
@@ -31,7 +30,6 @@
/**
* Get the {@link EcalCalibration} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL calibration collection
*/
@Override
@@ -43,7 +41,6 @@
/**
* Get the {@link EcalChannel} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL channel collection
*/
@Override
@@ -54,7 +51,6 @@
/**
* Get the {@link EcalGain} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL gain collection
*/
@Override
@@ -65,7 +61,6 @@
/**
* Get the {@link EcalTimeShift} collection for Test Run.
*
- * @param manager the conditions manager
* @return the Test Run ECAL time shift collection
*/
@Override
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/package-info.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/package-info.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/package-info.java Mon Mar 21 18:04:10 2016
@@ -2,8 +2,7 @@
* Database conditions system
* <p>
* The HPS conditions module provides facilities for accessing time dependent conditions for a detector at runtime using
- * a framework built on the LCSim conditions system. The {@link DatabaseConditionsReader} has a set of converters for
- * reading data from tables using SQL queries and creating appropriate, typed objects for them.
+ * a framework built on the LCSim conditions system.
*
* @author Jeremy McCormick, SLAC
* @see org.hps.conditions.api
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/AbstractSvtDaqMapping.java Mon Mar 21 18:04:10 2016
@@ -20,7 +20,7 @@
@SuppressWarnings("serial")
public static abstract class AbstractSvtDaqMappingCollection<T extends AbstractSvtDaqMapping> extends
BaseConditionsObjectCollection<T> {
-
+
/**
* Get the orientation of a sensor.
*
@@ -65,8 +65,8 @@
/**
* Get the orientation of an SVT sensor (AXIAL or STEREO).
*
- * @see AXIAL
- * @see STEREO
+ * @see #AXIAL
+ * @see #STEREO
* @return the orientation of the SVT sensor
*/
@Field(names = {"orientation"})
@@ -87,7 +87,7 @@
/**
* Set the SVT sensor layer number (1-10 for test run and 1-12 for engineering run).
*
- * @param layer : SVT sensor layer number
+ * @param layer SVT sensor layer number
*/
public final void setLayerNumber(final int layer) {
this.setFieldValue("layer", layer);
@@ -109,8 +109,8 @@
* Set the SVT half that the sensor belongs to.
*
* @param svtHalf the SVT half (TOP or BOTTOM)
- * @see TOP_HALF
- * @see BOTTOM_HALF
+ * @see #TOP_HALF
+ * @see #BOTTOM_HALF
*/
public final void setSvtHalf(final String svtHalf) {
if (!svtHalf.equals(AbstractSvtDaqMapping.TOP_HALF) && !svtHalf.equals(AbstractSvtDaqMapping.BOTTOM_HALF)) {
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/DaqMapHandler.java Mon Mar 21 18:04:10 2016
@@ -96,7 +96,7 @@
* Method that is triggered when the end of a tag is encountered.
*
* @param uri the Namespace URI.
- * @param locaName the local name (without prefix)
+ * @param localName the local name (without prefix)
* @param qName the qualified name (with prefix)
* @throws SAXException if there is an error processing the element
*/
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/OpeningAngleLoader.java Mon Mar 21 18:04:10 2016
@@ -30,7 +30,7 @@
* Check if the run record looks good.
*
* @param data
- * @return
+ * @return whether to accept the run or not
*/
private static boolean acceptRun(final RunData data) {
return !data.getRecord().get("to_tape").equals("JUNK")
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtDaqMapping.java Mon Mar 21 18:04:10 2016
@@ -129,8 +129,8 @@
/**
* Get the side of the sensor (ELECTRON or POSITRON).
*
- * @see ELECTRON
- * @see POSITRON
+ * @see #ELECTRON
+ * @see #POSITRON
* @return sensor side (ELECTRON or POSITRON)
*/
@Field(names = {"side"})
@@ -160,8 +160,8 @@
* Set the side of the sensor (ELECTRON or POSITRON).
*
* @param side the sensor side (ELECTRON or POSITRON)
- * @see {@link #ELECTRON}
- * @see {@link #POSITRON}
+ * @see #ELECTRON
+ * @see #POSITRON
*/
public final void setSide(final String side) {
if (!side.equals(SvtDaqMapping.ELECTRON) && !side.equals(SvtDaqMapping.POSITRON)) {
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/SvtT0Shift.java Mon Mar 21 18:04:10 2016
@@ -23,7 +23,7 @@
/**
* Get the {@link SvtT0Shift} associated with a given DAQ pair.
*
- * @param DAQ pair for a given sensor
+ * @param pair DAQ pair for a given sensor
* @return the {@link SvtT0Shift} associated with the DAQ pair or null if does not exist
*/
@Override
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtChannel.java Mon Mar 21 18:04:10 2016
@@ -26,7 +26,7 @@
/**
* Find a collection of channels by their DAQ pair assignment.
*
- * @param the DAQ pair (FEB ID and FEB Hybrid ID)
+ * @param pair the DAQ pair (FEB ID and FEB Hybrid ID)
* @return the collection of channels
*/
@Override
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtConditions.java Mon Mar 21 18:04:10 2016
@@ -10,7 +10,7 @@
/**
* This class contains all test run SVT conditions data by readout channel. {@link TestRunSvtChannel} objects from the
- * SVT channel map should be used to lookup the conditions using the {@link #getChannelConstants(TestRunSvtChannel)}
+ * SVT channel map should be used to lookup the conditions using the {@link #getChannelConstants(AbstractSvtChannel)}
* method.
*
* @author Jeremy McCormick, SLAC
Modified: java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java
=============================================================================
--- java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java (original)
+++ java/branches/jeremy-dev2/conditions/src/main/java/org/hps/conditions/svt/TestRunSvtT0Shift.java Mon Mar 21 18:04:10 2016
@@ -22,7 +22,7 @@
/**
* Get the {@link TestRunSvtT0Shift} associated with a given DAQ pair
*
- * @param DAQ pair for a given sensor
+ * @param pair DAQ pair for a given sensor
* @return the {@link TestRunSvtT0Shift} associated with the DAQ pair or null if does not exist
*/
@Override
Modified: java/branches/jeremy-dev2/crawler/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/crawler/pom.xml (original)
+++ java/branches/jeremy-dev2/crawler/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/crawler/</url>
Modified: java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java
=============================================================================
--- java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java (original)
+++ java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/DatacatAddFile.java Mon Mar 21 18:04:10 2016
@@ -36,6 +36,7 @@
OPTIONS.addOption("f", "folder", true, "datacat folder");
OPTIONS.addOption("s", "site", true, "datacat site");
OPTIONS.addOption("u", "base-url", true, "provide a base URL of the datacat server");
+ OPTIONS.addOption("D", "dry-run", false, "dry run mode which will not update the datacat");
}
/**
@@ -136,9 +137,13 @@
/**
* Run the job.
*/
- private void run() {
- List<DatasetModel> datasets = DatacatHelper.createDatasets(paths, config.folder(), config.site().toString());
- DatacatHelper.addDatasets(datasets, config.folder(), config.datacatUrl());
- LOGGER.info("added " + datasets.size() + " datasets");
- }
+ private void run() {
+ List<DatasetModel> datasets = DatacatHelper.createDatasets(paths, config.folder(), config.site().toString());
+ if (!config.dryRun()) {
+ DatacatHelper.addDatasets(datasets, config.folder(), config.datacatUrl());
+ LOGGER.info("Added " + datasets.size() + " datasets to datacat.");
+ } else {
+ LOGGER.info("Dry run mode; skipped adding dataset.");
+ }
+ }
}
Modified: java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java
=============================================================================
--- java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java (original)
+++ java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/EvioMetadataReader.java Mon Mar 21 18:04:10 2016
@@ -20,7 +20,6 @@
import org.hps.record.triggerbank.TriggerType;
import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
-import org.jlab.coda.jevio.EvioException;
import org.jlab.coda.jevio.EvioReader;
/**
@@ -48,7 +47,7 @@
*/
@Override
public Map<String, Object> getMetadata(final File file) throws IOException {
-
+
long totalEvents = 0;
int physicsEvents = 0;
int badEvents = 0;
@@ -62,7 +61,7 @@
Integer endTimestamp = null;
Integer goTimestamp = null;
Double triggerRate = null;
-
+
// Processor for calculating TI time offsets.
TiTimeOffsetEvioProcessor tiProcessor = new TiTimeOffsetEvioProcessor();
@@ -79,7 +78,7 @@
if (!(fileNumber % 10 == 0)) {
blinded = 1;
}
-
+
// Get file size.
long size = 0;
File cacheFile = file;
@@ -87,19 +86,27 @@
cacheFile = FileUtilities.getCachedFile(file);
}
size = cacheFile.length();
-
+
// Compute MD5 checksum string.
String checksum = FileUtilities.createMD5Checksum(cacheFile);
EvioReader evioReader = null;
- try {
+ try { /* file reader and event processing block */
+
// Open file in sequential mode.
- evioReader = EvioFileUtilities.open(file, true);
+ try {
+ evioReader = EvioFileUtilities.open(file, true);
+ } catch (Exception e) { /* any errors opening file caught here */
+ LOGGER.severe("Error opening EVIO file.");
+ throw new RuntimeException("Failed to open " + file.getPath(), e);
+ }
+
EvioEvent evioEvent = null;
// Event read loop.
eventLoop: while (true) {
- try {
+
+ try { /* event reading block */
// Parse next event.
evioEvent = evioReader.parseNextEvent();
@@ -108,70 +115,78 @@
LOGGER.fine("EOF after " + totalEvents + " events.");
break eventLoop;
}
-
- // Increment event count (doesn't count events that can't be parsed).
+
+ // Count parsed event in total.
++totalEvents;
- // Debug print event number and tag.
- LOGGER.finest("Parsed event " + evioEvent.getEventNumber() + " with tag 0x"
- + String.format("%08x", evioEvent.getHeader().getTag()));
-
- // Get head bank.
- BaseStructure headBank = HEAD_BANK.findBank(evioEvent);
-
- // Current timestamp.
- int thisTimestamp = 0;
-
- // Process head bank if not null.
+ } catch (Exception e) {
+ LOGGER.log(Level.SEVERE, "Error parsing next EVIO event.", e);
+ ++badEvents;
+ // ++totalEvents;
+ continue;
+ }
+
+ // Debug print event number and tag.
+ LOGGER.finest("Parsed event " + evioEvent.getEventNumber() + " with tag 0x"
+ + String.format("%08x", evioEvent.getHeader().getTag()));
+
+ // Get head bank.
+ BaseStructure headBank = HEAD_BANK.findBank(evioEvent);
+
+ // Current timestamp.
+ int thisTimestamp = 0;
+
+ // Process head bank if not null.
+ if (headBank != null) {
if (headBank != null) {
- if (headBank != null) {
- final int[] headBankData = headBank.getIntData();
- thisTimestamp = headBankData[3];
- if (thisTimestamp != 0) {
- // First header timestamp.
- if (firstHeadTimestamp == null) {
- firstHeadTimestamp = thisTimestamp;
- LOGGER.finer("First head timestamp " + firstHeadTimestamp + " from event "
- + evioEvent.getEventNumber());
- }
-
- // Last header timestamp.
- lastHeadTimestamp = thisTimestamp;
- }
-
- // Run number.
- if (run == null) {
- if (headBankData[1] != 0) {
- run = (long) headBankData[1];
- LOGGER.finer("Run number " + run + " from event " + evioEvent.getEventNumber());
- }
+ final int[] headBankData = headBank.getIntData();
+ thisTimestamp = headBankData[3];
+ if (thisTimestamp != 0) {
+ // First header timestamp.
+ if (firstHeadTimestamp == null) {
+ firstHeadTimestamp = thisTimestamp;
+ LOGGER.finer("First head timestamp " + firstHeadTimestamp + " from event "
+ + evioEvent.getEventNumber());
+ }
+
+ // Last header timestamp.
+ lastHeadTimestamp = thisTimestamp;
+ }
+
+ // Run number.
+ if (run == null) {
+ if (headBankData[1] != 0) {
+ run = (long) headBankData[1];
+ LOGGER.finer("Run number " + run + " from event " + evioEvent.getEventNumber());
}
}
}
-
- if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
-
- final int[] eventIdData = EvioEventUtilities.getEventIdData(evioEvent);
-
- if (eventIdData != null) {
-
- // Set the last physics event.
- lastPhysicsEvent = eventIdData[0];
-
- // Set the first physics event.
- if (firstPhysicsEvent == null) {
- firstPhysicsEvent = eventIdData[0];
- LOGGER.finer("Set first physics event " + firstPhysicsEvent);
- }
+ }
+
+ if (EvioEventUtilities.isPhysicsEvent(evioEvent)) {
+
+ final int[] eventIdData = EvioEventUtilities.getEventIdData(evioEvent);
+
+ if (eventIdData != null) {
+
+ // Set the last physics event.
+ lastPhysicsEvent = eventIdData[0];
+
+ // Set the first physics event.
+ if (firstPhysicsEvent == null) {
+ firstPhysicsEvent = eventIdData[0];
+ LOGGER.finer("Set first physics event " + firstPhysicsEvent);
}
-
- ++physicsEvents;
- } else if (EvioEventUtilities.isControlEvent(evioEvent)) {
- int[] controlData = EvioEventUtilities.getControlEventData(evioEvent);
+ }
+
+ ++physicsEvents;
+ } else if (EvioEventUtilities.isControlEvent(evioEvent)) {
+ int[] controlData = EvioEventUtilities.getControlEventData(evioEvent);
+ if (controlData != null) {
if (controlData[0] != 0) {
if (EventTagConstant.PRESTART.isEventTag(evioEvent)) {
prestartTimestamp = controlData[0];
- }
+ }
if (EventTagConstant.GO.isEventTag(evioEvent)) {
goTimestamp = controlData[0];
}
@@ -179,29 +194,31 @@
endTimestamp = controlData[0];
}
}
+ } else {
+ LOGGER.warning("Control data null; event num = " + evioEvent.getEventNumber()
+ + "; tag = " + String.format("%08x", evioEvent.getHeader().getTag())
+ + "; type = " + EventTagConstant.getEventTag(evioEvent)
+ + "; prev event ID = " + lastPhysicsEvent);
}
-
- // Count trigger types for this event.
- Set<TriggerType> triggerTypes = TriggerType.getTriggerTypes(evioEvent);
- for (TriggerType mask : triggerTypes) {
- int count = triggerCounts.get(mask) + 1;
- triggerCounts.put(mask, count);
- LOGGER.finest("Incremented " + mask.name() + " to " + count);
- }
-
- // Activate TI time offset processor.
- tiProcessor.process(evioEvent);
-
- } catch (Exception e) {
- // Trap all event processing errors.
- badEvents++;
- LOGGER.warning("Error processing EVIO event " + evioEvent.getEventNumber());
- }
- }
- } catch (final EvioException e) {
- // Error reading the EVIO file.
- throw new IOException("Error reading EVIO file.", e);
+ }
+
+ // Count trigger types for this event.
+ Set<TriggerType> triggerTypes = TriggerType.getTriggerTypes(evioEvent);
+ for (TriggerType mask : triggerTypes) {
+ int count = triggerCounts.get(mask) + 1;
+ triggerCounts.put(mask, count);
+ LOGGER.finest("Incremented " + mask.name() + " to " + count);
+ }
+
+ // Activate TI time offset processor.
+ tiProcessor.process(evioEvent);
+
+ }
+ } catch(Exception e) {
+ // Any uncaught errors from event processing are re-thrown here.
+ throw new RuntimeException("Error processing EVIO event.", e);
} finally {
+
// Close the reader.
if (evioReader != null) {
try {
@@ -214,18 +231,6 @@
LOGGER.info("Done reading " + totalEvents + " events from " + file.getPath());
- // Rough trigger rate calculation.
- try {
- if (firstHeadTimestamp != null && lastHeadTimestamp != null && totalEvents > 0
- && (firstHeadTimestamp - lastHeadTimestamp != 0)) {
- triggerRate = calculateTriggerRate(firstHeadTimestamp, lastHeadTimestamp, totalEvents);
- } else {
- LOGGER.log(Level.WARNING, "Missing information for calculating trigger rate.");
- }
- } catch (Exception e) {
- LOGGER.log(Level.WARNING, "Error calculating the trigger rate.", e);
- }
-
// Create and fill the metadata map.
final Map<String, Object> metadataMap = new LinkedHashMap<String, Object>();
@@ -242,8 +247,8 @@
metadataMap.put("runMax", run);
metadataMap.put("eventCount", totalEvents);
metadataMap.put("size", size);
- metadataMap.put("checksum", checksum);
-
+ metadataMap.put("checksum", checksum);
+
// File sequence number.
metadataMap.put("FILE", fileNumber);
@@ -253,21 +258,21 @@
// First and last timestamps which may come from control or physics events.
if (firstHeadTimestamp != null) {
metadataMap.put("FIRST_HEAD_TIMESTAMP", firstHeadTimestamp);
- }
-
+ }
+
if (lastHeadTimestamp != null) {
metadataMap.put("LAST_HEAD_TIMESTAMP", lastHeadTimestamp);
- }
+ }
// First and last physics event numbers.
if (firstPhysicsEvent != null) {
metadataMap.put("FIRST_PHYSICS_EVENT", firstPhysicsEvent);
- }
-
+ }
+
if (lastPhysicsEvent != null) {
metadataMap.put("LAST_PHYSICS_EVENT", lastPhysicsEvent);
}
-
+
// Timestamps which are only set if the corresponding control events were found in the file.
if (prestartTimestamp != null) {
metadataMap.put("PRESTART_TIMESTAMP", prestartTimestamp);
@@ -283,29 +288,40 @@
metadataMap.put("TI_TIME_MIN_OFFSET", new Long(tiProcessor.getMinOffset()).toString());
metadataMap.put("TI_TIME_MAX_OFFSET", new Long(tiProcessor.getMaxOffset()).toString());
metadataMap.put("TI_TIME_N_OUTLIERS", tiProcessor.getNumOutliers());
-
+
// Event counts.
metadataMap.put("BAD_EVENTS", badEvents);
-
+
// Physics event count.
metadataMap.put("PHYSICS_EVENTS", physicsEvents);
-
- // Rough trigger rate.
- if (triggerRate != null && !Double.isInfinite(triggerRate) && !Double.isNaN(triggerRate)) {
- DecimalFormat df = new DecimalFormat("#.##");
- df.setRoundingMode(RoundingMode.CEILING);
- LOGGER.info("Setting trigger rate to " + triggerRate + " Hz.");
- metadataMap.put("TRIGGER_RATE", Double.parseDouble(df.format(triggerRate)));
- } else {
- LOGGER.warning("Failed to calculate trigger rate.");
- }
-
- // Trigger type counts.
+
+ // Rough trigger rate calculation.
+ try { /* trigger rate block */
+ if (firstHeadTimestamp != null && lastHeadTimestamp != null && totalEvents > 0
+ && (firstHeadTimestamp - lastHeadTimestamp != 0)) {
+ triggerRate = calculateTriggerRate(firstHeadTimestamp, lastHeadTimestamp, totalEvents);
+ if (triggerRate != null && !Double.isInfinite(triggerRate) && !Double.isNaN(triggerRate)) {
+ DecimalFormat df = new DecimalFormat("#.##");
+ df.setRoundingMode(RoundingMode.CEILING);
+ triggerRate = Double.parseDouble(df.format(triggerRate));
+ LOGGER.info("Setting trigger rate to " + triggerRate + " Hz.");
+ metadataMap.put("TRIGGER_RATE", triggerRate);
+ } else {
+ LOGGER.warning("Skipping invalid trigger rate value of " + triggerRate + ".");
+ }
+ } else {
+ LOGGER.log(Level.WARNING, "Some information is missing for calculating trigger rate.");
+ }
+ } catch (Exception e) {
+ LOGGER.log(Level.WARNING, "Error calculating the trigger rate.", e);
+ }
+
+ // Get trigger type counts.
for (Entry<TriggerType, Integer> entry : triggerCounts.entrySet()) {
metadataMap.put(entry.getKey().name(), entry.getValue());
}
- // Print the file metadata to log.
+ // Print the metadata to log.
StringBuffer sb = new StringBuffer();
sb.append('\n');
for (Entry<String, Object> entry : metadataMap.entrySet()) {
@@ -316,7 +332,7 @@
// Return the completed metadata map.
return metadataMap;
}
-
+
/**
* Calculate the trigger rate in Hz.
*
Modified: java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java
=============================================================================
--- java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java (original)
+++ java/branches/jeremy-dev2/crawler/src/main/java/org/hps/crawler/FileMetadataReader.java Mon Mar 21 18:04:10 2016
@@ -14,8 +14,8 @@
/**
* Create a metadata map with keys and values from the contents of a file.
*
- * @param the input file for extracting metadata
- * @return the metadata map
+ * @param file the input file from which to extract metadata
+ * @return the metadata map of field names to values
* @throws IOException if there is an error reading the file
*/
Map<String, Object> getMetadata(File file) throws IOException;
Modified: java/branches/jeremy-dev2/detector-data/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/detector-data/pom.xml (original)
+++ java/branches/jeremy-dev2/detector-data/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/detector-data/</url>
Modified: java/branches/jeremy-dev2/detector-model/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/detector-model/pom.xml (original)
+++ java/branches/jeremy-dev2/detector-model/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<build>
<plugins>
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/HpsTracker2.java Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
import org.lcsim.detector.converter.compact.SubdetectorDetectorElement;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
/**
- * Class describing an {@link HPSTracker2} subdetector.
+ * Detector element for <code>HPSTracker2</code> type.
*
* @author Omar Moreno <[log in to unmask]>
* @author Jeremy McCormick <[log in to unmask]>
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/converter/compact/subdetector/SvtStereoLayer.java Mon Mar 21 18:04:10 2016
@@ -21,7 +21,7 @@
/**
* Ctor
*
- * @param layerNumber Layer number to which the stereo pair belongs to
+ * @param stereoLayerNumber Layer number to which the stereo pair belongs
* @param firstSensor The first sensor in the stereo layer
* @param secondSensor The second sensor in the stereo layer
*/
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsSiSensor.java Mon Mar 21 18:04:10 2016
@@ -303,9 +303,8 @@
/**
* Generate an ID for a channel (strip) on a sensor.
*
- * @param sensor
* @param channel : Physical channel number
- * @return ID
+ * @return the channel ID
*/
public long makeChannelID(final int channel) {
final int sideNumber = this.hasElectrodesOnSide(ChargeCarrier.HOLE) ? ChargeCarrier.HOLE.charge()
@@ -385,7 +384,7 @@
/**
* Set the front end board (FEB) ID of the sensor.
*
- * @param FEB ID The FEB ID of the sensor.
+ * @param febID the FEB ID of the sensor
*/
public void setFebID(final int febID) {
this.febID = febID;
@@ -394,7 +393,7 @@
/**
* Set the FEB hybrid ID of the sensor.
*
- * @param FEB hybrid ID The FEB hybrid ID.
+ * @param febHybridID the FEB hybrid ID
*/
public void setFebHybridID(final int febHybridID) {
this.febHybridID = febHybridID;
@@ -412,7 +411,7 @@
/**
* Flag the sensor as being axial.
*
- * @param true if the sensor is Axial, false otherwise
+ * @param isAxial true if the sensor is Axial, false otherwise
*/
public void setAxial(final boolean isAxial) {
this.isAxial = isAxial;
@@ -421,7 +420,7 @@
/**
* Flag the sensor as being stereo
*
- * @param true is the sensor is stereo, false otherwise
+ * @param isStereo true if the sensor is stereo, false otherwise
*/
public void setStereo(final boolean isStereo) {
this.isStereo = isStereo;
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/detector/tracker/silicon/HpsTestRunSiSensor.java Mon Mar 21 18:04:10 2016
@@ -71,7 +71,7 @@
/**
* Set the FPGA ID associated with this sensor.
*
- * @param The FPGA ID
+ * @param fpgaID The FPGA ID
*/
public void setFpgaID(int fpgaID) {
this.fpgaID = fpgaID;
@@ -80,7 +80,7 @@
/**
* Set the hybrid ID associated with this sensor.
*
- * @param The hybrid ID.
+ * @param hybridID The hybrid ID.
*/
public void setHybridID(int hybridID) {
this.hybridID = hybridID;
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTestRunTracker2014LCDDBuilder.java Mon Mar 21 18:04:10 2016
@@ -143,7 +143,7 @@
/**
* Rules for adding the LCDD half module geometry.
- * @param bundle - module to be added
+ * @param bundle2 - module to be added
* @param mother - mother LCDD geometry object
*/
protected void addTestRunHalfModule(HalfModuleBundle bundle2, LCDDSurveyVolume mother) {
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014GeometryDefinition.java Mon Mar 21 18:04:10 2016
@@ -27,8 +27,8 @@
*
*/
public class HPSTracker2014GeometryDefinition extends HPSTrackerGeometryDefinition {
-
- private static final Logger LOGGER = Logger.getLogger(HPSTracker2014GeometryDefinition.class.getPackage().getName());
+
+ private static final Logger LOGGER = Logger.getLogger(HPSTracker2014GeometryDefinition.class.getPackage().getName());
public HPSTracker2014GeometryDefinition(boolean debug, Element node) {
super(debug, node);
@@ -38,8 +38,7 @@
doBottom = true;
doTop = true;
layerBitMask = 0x3F;
- }
-
+ }
/* (non-Javadoc)
* @see org.lcsim.geometry.compact.converter.HPSTrackerBuilder#build()
@@ -47,12 +46,11 @@
public void build() {
if(isDebug()) System.out.printf("%s: constructing the geometry objects\n", this.getClass().getSimpleName());
-
// Create alignment correction objects
// THis is really a ugly approach with MP corrections initialized before and
// the survey corrections based on the XML node
- // FIX THIS! //TODO
+ // TODO: FIX THIS!
AlignmentCorrection alignmentCorrections = new AlignmentCorrection();
alignmentCorrections.setNode(node);
AlignmentCorrection supBotCorr = getL13UChannelAlignmentCorrection(false);
@@ -74,25 +72,19 @@
SvtBoxBasePlate svtBoxBasePlate = new SvtBoxBasePlate("base_plate",svtBox,null);
surveyVolumes.add(svtBoxBasePlate);
-
-
-
+
SupportRingL13BottomKinMount supportRingKinL13Bottom = new SupportRingL13BottomKinMount("c_support_kin_L13b", svtBox, supBotCorr);
- surveyVolumes.add(supportRingKinL13Bottom);
-
+ surveyVolumes.add(supportRingKinL13Bottom);
UChannelL13 uChannelL13Bottom = new UChannelL13Bottom("support_bottom_L13", svtBox, alignmentCorrections, supportRingKinL13Bottom);
surveyVolumes.add(uChannelL13Bottom);
UChannelL13Plate uChannelL13BottomPlate = new UChannelL13BottomPlate("support_plate_bottom_L13", svtBox, null, uChannelL13Bottom);
surveyVolumes.add(uChannelL13BottomPlate);
-
SupportRingL13TopKinMount supportRingKinL13Top = new SupportRingL13TopKinMount("c_support_kin_L13t", svtBox, supTopCorr);
surveyVolumes.add(supportRingKinL13Top);
-
-
-
+
UChannelL13Top uChannelL13Top = new UChannelL13Top("support_top_L13", svtBox, alignmentCorrections, supportRingKinL13Top);
surveyVolumes.add(uChannelL13Top);
@@ -110,7 +102,6 @@
UChannelL46Plate uChannelL46TopPlate = new UChannelL46TopPlate("support_plate_top_L46", svtBox, null, uChannelL46Top);
surveyVolumes.add(uChannelL46TopPlate);
-
for(int l=1; l<=6;++l) {
if(doLayer(l)) {
@@ -135,30 +126,20 @@
bundle.print();
}
}
-
-
-
-
-
- }
-
+ }
/**
* {@link SurveyVolume} volume defining the pair spectrometer (PS) vacuum chamber
* Reference: tracking volume coordinate system
* Origin: same as reference
* Orientation: u - points in x direction (towards positron side), v - points upstream
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class PSVacuumChamber extends SurveyVolume {
+
public static final double height = PS_vac_box_inner_height;
public static final double width = PS_vac_box_inner_width;
public static final double length = PS_vac_box_inner_length;
-
-
-
+
public PSVacuumChamber(String name, SurveyVolume mother, AlignmentCorrection alignmentCorrection) {
super(name, mother, alignmentCorrection);
init();
@@ -184,9 +165,6 @@
* Reference: PS vacuum chamber coordinate system. Note that the PS vacuum chamber box is placed w.r.t. this box and the target positions.
* Origin: intersection of midplanes vertically and horizontally
* Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SvtBox extends SurveyVolume {
public static final double height = 6.740*inch;
@@ -198,10 +176,7 @@
public static final double center_to_target_z = 13.777*inch;
public static final double center_to_target_x = 0.84*inch;
public static final double center_to_target_y = 0.0;
-
-
-
-
+
public SvtBox(String name, SurveyVolume mother, AlignmentCorrection alignmentCorrection) {
super(name, mother, alignmentCorrection);
init();
@@ -217,8 +192,7 @@
ballPos = new BasicHep3Vector(0, 0, 0);
veePos = new BasicHep3Vector(ballPos.x()+1, ballPos.y(), ballPos.z());
- flatPos = new BasicHep3Vector(ballPos.x(), ballPos.y()+1, ballPos.z());
-
+ flatPos = new BasicHep3Vector(ballPos.x(), ballPos.y()+1, ballPos.z());
}
}
@@ -228,9 +202,6 @@
* Reference: {@link SvtBox} coordinate system.
* Origin: surface of base plate intersection with center of hole for adjustment screw on positron side
* Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SvtBoxBasePlate extends SurveyVolume {
public static final double length = 50.5*inch;
@@ -268,16 +239,11 @@
}
-
-
-
/**
* {@link SurveyVolume} volume defining the coordinate system of the support ring
* Reference: @SvtBoxBasePlate
* Origin: pin position of support ring (electron side)
* Orientation: slot position is vee position (positron side) i.e u points towards the positron side and v in the upstream beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class SupportRing extends SurveyVolume {
@@ -315,14 +281,9 @@
flatPos = VecOp.add(ballPos, vPrime);
}
}
-
-
-
+
/**
* Abstract {@link SurveyVolume} volume defining a coordinate system from the kinematic mount positions for support channels
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public abstract static class SupportRingL13KinMount extends SurveyVolume {
@@ -360,9 +321,6 @@
* Reference: {@link SvtBox} coordinate system
* Origin: cone mount (it's on the electron side)
* Orientation: ball is cone mount, slot mount is vee position and flat is along beam line pointing upstream
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SupportRingL13BottomKinMount extends SupportRingL13KinMount {
@@ -385,9 +343,6 @@
* Reference: @SupportRing coordinate system
* Origin: cone mount (it's on the electron side)
* Orientation: ball is cone mount, slot mount is vee position and flat is along beamline pointing upstream
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class SupportRingL13TopKinMount extends SupportRingL13KinMount {
//public static final double mount_surface_wrt_baseplate_vertically = 5.388*inch;
@@ -420,12 +375,9 @@
// flatPos = new BasicHep3Vector(flat_pos_x,flat_pos_y,flat_pos_z);
// }
}
-
-
- /**
- * Abstract {@link SurveyVolume} volume defining the coordinate system of the L1-3 u-channels
- *
- * @author Per Hansson Adrian <[log in to unmask]>
+
+ /**
+ * Abstract {@link SurveyVolume} volume defining the coordinate system of the L1-3 u-channels
*/
public abstract static class UChannelL13 extends SurveyVolume {
public final static double length = UChannelL13Plate.length;
@@ -457,9 +409,6 @@
* Reference: {@link SupportRingL13BottomKinMount} coordinate system
* Origin: midpoint between upstream survey cones
* Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class UChannelL13Bottom extends UChannelL13 {
private final static double cone_to_edge_of_plate_y = 12.25*inch;
@@ -489,14 +438,9 @@
}
}
-
-
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
* This is at nominal position.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
private static class UChannelL13BottomSurveyBalls {
@@ -523,18 +467,12 @@
}
}
-
-
-
/**
* {@link SurveyVolume} volume defining the coordinate system of the top L1-3 u-channel
* Reference: SupportRingL13TopKinMount coordinate system
* Origin: midpoint between upstream survey cones
* Orientation: u - width pointing towards positron side, v - pointing along the U-channel in the beam direction
* Note that this is flipped w.r.t. bottom support.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class UChannelL13Top extends UChannelL13 {
private final static Hep3Vector ball_kinMount = new BasicHep3Vector(SupportRingL13TopKinMount.kin_mount_pos_x,SupportRingL13TopKinMount.kin_mount_pos_y,SupportRingL13TopKinMount.kin_mount_pos_z);
@@ -567,15 +505,10 @@
return length;
}
}
-
-
-
+
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
* This is at nominal position.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
private static class UChannelL13TopSurveyBalls {
@@ -602,16 +535,8 @@
}
}
-
-
-
-
-
/**
* Abstract {@link SurveyVolume} volume defining the coordinate system of the u-channel plate
-
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public abstract static class UChannelPlate extends SurveyVolume {
public UChannelPlate(String name, SurveyVolume m,
@@ -626,9 +551,6 @@
/**
* Abstract {@link SurveyVolume} volume defining the coordinate system of the u-channel plate
-
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public abstract static class UChannelL13Plate extends UChannelPlate {
private final static double pocket_depth_L1 = 0.025;
@@ -689,9 +611,6 @@
* Reference: @UChannelL13Bottom coordinate system
* Origin: same as reference
* Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
*/
public static class UChannelL13BottomPlate extends UChannelL13Plate {
protected final static double L1_module_pin_to_edge_of_plate = (16.0-4.126)*inch;
@@ -715,10 +634,7 @@
* {@link SurveyVolume} volume defining the coordinate system of the bottom u-channel plate
* Reference: @UChannelL13Bottom coordinate system
* Origin: same as reference
- * Orientation: same as reference
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ * Orientation: same as reference
*/
public static class UChannelL13TopPlate extends UChannelL13Plate {
protected final static double L1_module_pin_to_edge_of_plate = (16.0-2.75)*inch;
@@ -737,12 +653,9 @@
}
}
-
-
+
/**
* Abstract {@link SurveyVolume} volume defining the L4-6 u-channel volume
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*/
public abstract static class UChannelL46 extends SurveyVolume {
@@ -760,26 +673,18 @@
protected void setBoxDim() {
setBoxDim(width,length,height);
- }
-
- }
-
-
-
-
-
+ }
+ }
+
/**
* {@link SurveyVolume} volume defining the coordinate system of the u-channel
* Reference: SVTBox coordinate system
* Origin: midpoint between upstream survey cones
* Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class UChannelL46Bottom extends UChannelL46 {
-
-
+
protected static final double cone_to_edge_of_plate_y = 2.75*inch;
public UChannelL46Bottom(String name, SurveyVolume m, AlignmentCorrection alignmentCorrection) {
@@ -803,15 +708,12 @@
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
private static class UChannelL46BottomSurveyBalls {
- // Shawn's calculated point at midpoint between two forward survey balls
+ // Shawn's calculated point at midpoint between two forward survey balls
protected final static Hep3Vector ball_pos = new BasicHep3Vector(-5.857, -157.776, -8.423);
-
private static final double cone_fwd_right_x = -7.019*inch;
private static final double cone_fwd_right_y = -6.419*inch;
@@ -841,22 +743,14 @@
return VecOp.sub(bwd_left, fwd_left);
}
}
-
-
-
-
/**
* {@link SurveyVolume} volume defining the coordinate system of the u-channel
- * Reference: {@link SVTBox} coordinate system
- * Origin: midpoint between upstream survey cones
- * Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
- *
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ * Reference: {@link HPSTracker2014GeometryDefinition.SVTBox} coordinate system
+ * Origin: midpoint between upstream survey cones
+ * Orientation: u - width pointing towards electron side, v - pointing along the U-channel in the beam direction
*/
public static class UChannelL46Top extends UChannelL46 {
-
private static final double cone_to_side_plate_pin_y = (0.875-0.25)*inch;
private static final double side_plate_pin_to_edge_of_plate_y = 1.5*inch;
@@ -886,8 +780,7 @@
//flatPos = new BasicHep3Vector(ballPos.x(), ballPos.y()-1, ballPos.z()); // random offset
}
}
-
-
+
/**
* Position of the center of the survey balls when engaging the cones in the side plates of the U-channel.
*
@@ -899,7 +792,6 @@
// Shawn's calculated point at midpoint between two forward survey balls
protected final static Hep3Vector ball_pos = new BasicHep3Vector(-6.341, -141.909, 8.423);
-
protected static final double cone_fwd_right_x = -7.038*inch;
protected static final double cone_fwd_right_y = -5.794*inch;
protected static final double cone_fwd_right_z = 0.332*inch;
@@ -928,14 +820,9 @@
return VecOp.sub(bwd_right, fwd_right);
}
}
-
-
-
- /**
- * Abstract {@link SurveyVolume} defining the coordinate system of the u-channel plates
-
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+
+ /**
+ * Abstract {@link SurveyVolume} defining the coordinate system of the u-channel plates
*/
public abstract static class UChannelL46Plate extends UChannelPlate {
public final static double pocket_depth_L4 = 0.1;
@@ -1043,16 +930,12 @@
}
}
-
-
-
+
/**
* {@link SurveyVolume} volume defining the coordinate system of module L1-3
* Reference: @UChannelL13Bottom coordinate system
* Origin: hole position on mounting surface (on electron side)
* Orientation: u - is normal to the surface pointing vertically down, v - points along module away from hybrid side (i.e. positron direction).
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public abstract static class ModuleL13 extends BaseModule {
@@ -1119,8 +1002,7 @@
protected double getHoleModuleCenterOffset() {
return UChannelL13Bottom.cone_to_edge_of_plate_y - UChannelL13BottomPlate.L1_module_pin_to_edge_of_plate;
}
- }
-
+ }
public static class ModuleL1Bot extends ModuleL13Bot {
@@ -1136,8 +1018,7 @@
return new BasicHep3Vector(x, y, z);
}
- }
-
+ }
public static class ModuleL1Top extends ModuleL13Top {
@@ -1155,9 +1036,7 @@
}
}
-
-
-
+
public static class ModuleL2Bot extends ModuleL13Bot {
public ModuleL2Bot(String name, SurveyVolume mother,
@@ -1190,10 +1069,7 @@
}
}
-
-
-
-
+
public static class ModuleL3Bot extends ModuleL13Bot {
public ModuleL3Bot(String name, SurveyVolume mother,
@@ -1226,11 +1102,7 @@
}
}
-
-
-
-
-
+
/**
* Abstract {@link SurveyVolume} volume defining the coordinate system of module L4-6
*
@@ -1628,8 +1500,6 @@
* Reference: @ModuleL13Bot coordinate system
* Origin: sensor center
* Orientation: w - is normal to the surface pointing from p-side to n-side, v - points along strips away from signal bond pads
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class HalfModuleAxial extends HPSTestRunTracker2014GeometryDefinition.TestRunHalfModule {
@@ -1668,8 +1538,6 @@
* Reference: @ModuleL13Bot coordinate system
* Origin: sensor center
* Orientation: same as axial - the module is rotated later.
- *
- * @author Per Hansson Adrian <[log in to unmask]>
*
*/
public static class HalfModuleStereo extends HPSTestRunTracker2014GeometryDefinition.TestRunHalfModule {
@@ -1873,23 +1741,21 @@
}
-
-
-
-
- /**
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ /**
+ *
*/
public static class LongModuleBundle extends BaseModuleBundle {
+
public HalfModuleBundle halfModuleAxialHole = null;
public HalfModuleBundle halfModuleStereoHole = null;
public HalfModuleBundle halfModuleAxialSlot = null;
public HalfModuleBundle halfModuleStereoSlot = null;
protected SurveyVolume coldBlock = null;
+
public LongModuleBundle(BaseModule m) {
super(m);
}
+
public void print() {
if(module!=null) System.out.printf("%s: %s\n", this.getClass().getSimpleName(),module.toString());
if(halfModuleAxialHole!=null) halfModuleAxialHole.print();
@@ -1897,13 +1763,11 @@
if(coldBlock!=null)System.out.printf("%s: %s\n", this.getClass().getSimpleName(),coldBlock.getName());
if(halfModuleStereoHole!=null) halfModuleStereoHole.print();
if(halfModuleStereoSlot!=null) halfModuleStereoSlot.print();
- }
- }
-
-
- /**
- * @author Per Hansson Adrian <[log in to unmask]>
- *
+ }
+ }
+
+ /**
+ *
*/
public static class LongHalfModuleBundle extends HalfModuleBundle {
public LongHalfModuleBundle() {
@@ -1913,9 +1777,7 @@
super(hm);
}
}
-
-
-
+
/**
* Create the half-module.
* @param side - stereo or axial
@@ -1955,9 +1817,6 @@
//TestRunModuleBundle bundle = (TestRunModuleBundle)getModuleBundle(mother);
//TestRunHalfModuleBundle halfModuleBundle;
LongModuleBundle bundle = (LongModuleBundle)getModuleBundle(mother);
-
-
-
// Build the half-module bundle and half-module
//TODO clean this up to a separate method
@@ -1983,9 +1842,6 @@
}
}
halfModuleBundle.halfModule = halfModule;
-
-
-
// create the half module components
makeHalfModuleComponentSensor(halfModule);
@@ -1994,13 +1850,8 @@
//makeHalfModuleComponentCF(halfModule);
- //makeHalfModuleComponentHybrid(halfModule);
-
-
-
-
- }
-
+ //makeHalfModuleComponentHybrid(halfModule);
+ }
protected void makeLongHalfModuleComponentKapton(BaseModule mother) {
@@ -2022,10 +1873,6 @@
}
-
-
-
-
protected HPSTestRunTracker2014GeometryDefinition.TestRunHalfModule createTestRunHalfModuleAxial(String volName,
BaseModule mother, AlignmentCorrection alignmentCorrection,
int layer, String half) {
@@ -2047,7 +1894,6 @@
* @param alignmentCorrection
* @param layer
* @param half
- * @return
*/
protected LongHalfModule createLongAxialSlotHalfModule(String name, SurveyVolume mother,
AlignmentCorrection alignmentCorrection, int layer,
@@ -2062,16 +1908,13 @@
* @param alignmentCorrection
* @param layer
* @param half
- * @return
*/
protected LongHalfModule createLongStereoSlotHalfModule(String name, SurveyVolume mother,
AlignmentCorrection alignmentCorrection, int layer,
String half) {
return new LongStereoSlotHalfModule(name, mother, alignmentCorrection, layer, half);
}
-
-
-
+
/* (non-Javadoc)
* @see org.lcsim.geometry.compact.converter.HPSTrackerGeometryDefinition#getHalfModuleBundle(org.lcsim.geometry.compact.converter.HPSTestRunTracker2014GeometryDefinition.BaseModule, java.lang.String)
*/
@@ -2121,9 +1964,6 @@
}
return hm;
}
-
-
-
/* (non-Javadoc)
* @see org.lcsim.geometry.compact.converter.HPSTrackerBuilder#getMillepedeLayer(java.lang.String)
@@ -2149,8 +1989,6 @@
return getMillepedeLayer(isTopLayer, layer, isAxial, isHole);
}
-
-
/**
* Definition relating the sensors and layer number used in millepede for this detector.
@@ -2158,7 +1996,6 @@
* @param layer
* @param isAxial
* @param isHole
- * @return
*/
public int getMillepedeLayer(boolean isTopLayer, int layer, boolean isAxial, boolean isHole) {
int l = -1;
@@ -2208,16 +2045,7 @@
if(l<0) throw new RuntimeException("Error getting the millepede layer.");
if(isDebug()) System.out.printf("%s: %s %d %s %s -> MP layer %d\n",getClass().getSimpleName(),isTopLayer?"top":"bottom", layer, isAxial?"axial":"stereo", isHole?"hole":"slot", l);
-
return l;
}
-
-
-
-
}
-
-
-
-
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTracker2014v1GeometryDefinition.java Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
/**
*
* Updated geometry information for the HPS tracker 2014
-
+ *
* @author Per Hansson Adrian <[log in to unmask]>
*
*/
@@ -93,8 +93,7 @@
/**
- * PI rotation around generic z-axis
- * @return
+ * PI rotation around generic z-axis
*/
private static Rotation getSlotRotation() {
return new Rotation(new Vector3D(0,0,1),Math.PI);
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/HPSTrackerBuilder.java Mon Mar 21 18:04:10 2016
@@ -632,7 +632,7 @@
/**
* Get hole or slot key name from string
*
- * @param name.
+ * @param name "hole" or "slot"
* @return hole or not boolean
*/
public static boolean isHoleFromName(String name) {
@@ -649,10 +649,7 @@
/**
* Extract old definition of Test Run sensor number.
*
- * @param isTopLayer - top or bottom layer
- * @param l - layer
- * @param isAxial - axial or stereo sensor
- * @return
+ * @return the geometric layer according to Test Run definition
*/
public int getOldGeomDefLayerFromVolumeName(String name) {
@@ -672,8 +669,7 @@
/**
* Get the layer number consistent with the old geometry definition.
*
- * @param module name that contains layer and half information.
- * @return the layer.
+ * @return the older layer definition
*/
public int getOldLayerDefinition(boolean isTopLayer, int l, boolean isAxial) {
int layer = -1;
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/compact/converter/LCDDSurveyVolume.java Mon Mar 21 18:04:10 2016
@@ -45,7 +45,7 @@
/**
* Initialize this object with a known volume and no mother. Typically the world volume would use this.
* @param surveyVolume - core geometry definitions
- * @param vol - given volume
+ * @param volume - given volume
*/
public LCDDSurveyVolume(SurveyVolume surveyVolume, Volume volume) {
super(surveyVolume);
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal.java Mon Mar 21 18:04:10 2016
@@ -15,39 +15,33 @@
/**
* Reconstruction version of HPS ECal with crystal array.
*
- * @author Jeremy McCormick <[log in to unmask]>
- * @author Timothy Nelson <[log in to unmask]>
- * @version $Id: HPSEcal.java,v 1.6 2011/07/28 20:20:18 jeremy Exp $
+ * @author Jeremy McCormick, SLAC
+ * @author Timothy Nelson, SLAC
*/
-public class HPSEcal extends AbstractSubdetector
-{
+public class HPSEcal extends AbstractSubdetector {
+
private int nx;
private int ny;
private double beamgap;
private double dface;
private boolean oddX;
- public static class NeighborMap extends HashMap<Long,Set<Long>>
- {
+ public static class NeighborMap extends HashMap<Long, Set<Long>> {
+
IIdentifierHelper helper;
- public NeighborMap(IIdentifierHelper helper)
- {
+
+ public NeighborMap(IIdentifierHelper helper) {
this.helper = helper;
}
-
- public String toString()
- {
+
+ public String toString() {
System.out.println("NeighborMap has " + this.size() + " entries.");
StringBuffer buff = new StringBuffer();
- for (long id : this.keySet())
- {
- buff.append(helper.unpack(new Identifier(id)))
- .append("\n");
- Set<Long> nei = this.get(id);
- for (long nid : nei)
- {
- buff.append(" " + helper.unpack(new Identifier(nid)))
- .append("\n");
+ for (long id : this.keySet()) {
+ buff.append(helper.unpack(new Identifier(id))).append("\n");
+ Set<Long> nei = this.get(id);
+ for (long nid : nei) {
+ buff.append(" " + helper.unpack(new Identifier(nid))).append("\n");
}
}
return buff.toString();
@@ -56,308 +50,270 @@
private NeighborMap neighborMap = null;
- HPSEcal(Element node) throws JDOMException
- {
+ HPSEcal(Element node) throws JDOMException {
super(node);
-
+
Element layout = node.getChild("layout");
-
+
nx = layout.getAttribute("nx").getIntValue();
ny = layout.getAttribute("ny").getIntValue();
beamgap = layout.getAttribute("beamgap").getDoubleValue();
dface = layout.getAttribute("dface").getDoubleValue();
-
- if (nx % 2 != 0)
+
+ if (nx % 2 != 0) {
oddX = true;
- }
-
- public double distanceToFace()
- {
+ }
+ }
+
+ public double distanceToFace() {
return dface;
}
-
- public double beamGap()
- {
+
+ public double beamGap() {
return beamgap;
}
-
+
/**
* The number of crystals in X in one section.
- * @return
+ *
+ * @return the number of crystals in X in one section
*/
- public double nx()
- {
+ public double nx() {
return nx;
}
-
+
/**
* The number of crystals in y in one section.
- * @return
+ *
+ * @return the number of crystals in Y in one section
*/
- public double ny()
- {
+ public double ny() {
return ny;
- }
-
+ }
+
// Class for storing neighbor incides in XY and side.
- static class XYSide implements Comparator<XYSide>
- {
+ static class XYSide implements Comparator<XYSide> {
+
int x;
int y;
int side;
-
- public XYSide(int x, int y, int side)
- {
+
+ public XYSide(int x, int y, int side) {
this.x = x;
this.y = y;
this.side = side;
}
-
- public int x()
- {
+
+ public int x() {
return x;
}
-
- public int y()
- {
+
+ public int y() {
return y;
}
-
- public int side()
- {
+
+ public int side() {
return side;
}
-
- public boolean equals(Object o)
- {
- XYSide xy = (XYSide)o;
- return xy.x() == x && xy.y() == y && xy.side() == side;
- }
-
- public int compare(XYSide o1, XYSide o2)
- {
- if (o1.equals(o2))
- {
+
+ public boolean equals(Object o) {
+ XYSide xy = (XYSide) o;
+ return xy.x() == x && xy.y() == y && xy.side() == side;
+ }
+
+ public int compare(XYSide o1, XYSide o2) {
+ if (o1.equals(o2)) {
return 0;
- }
- else
- {
+ } else {
return -1;
}
}
}
-
+
/**
- * Get the neighbors for a given cell ID. Each crystal not on an edge
- * has 8 neighbors. Edge crystals have fewer.
+ * Get the neighbors for a given cell ID. Each crystal not on an edge has 8 neighbors. Edge crystals have fewer.
+ *
* @param id The cell ID.
* @return A <code>Set</code> containing the cell's neighbors.
*/
- Set<Long> getNeighbors(Long id)
- {
+ Set<Long> getNeighbors(Long id) {
// Get the IDDecoder.
- IDDecoder dec = getIDDecoder();
-
+ IDDecoder dec = getIDDecoder();
+
// Set the ID.
dec.setID(id);
-
+
// Get ID field values.
int x = dec.getValue("ix");
int y = dec.getValue("iy");
int side = dec.getValue("side");
-
+
// Get field indices.
int ix = dec.getFieldIndex("ix");
int iy = dec.getFieldIndex("iy");
int iside = dec.getFieldIndex("side");
-
+
// Get X, Y, & side neighbor data for this crystal.
Set<XYSide> neighbors = getNeighbors(x, y, side);
// Get buffer with values from current ID.
int[] buffer = new int[dec.getFieldCount()];
- dec.getValues(buffer);
-
+ dec.getValues(buffer);
+
// Create an encoder to make neighbor IDs.
IDEncoder enc = new IDEncoder(dec.getIDDescription());
-
+
// Set to hold neighbor IDs.
Set<Long> ids = new HashSet<Long>();
-
+
// Loop over neighbor objects to make IDs.
- for (XYSide xyside : neighbors)
- {
+ for (XYSide xyside : neighbors) {
buffer[ix] = xyside.x;
buffer[iy] = xyside.y;
buffer[iside] = xyside.side;
long nId = enc.setValues(buffer);
ids.add(nId);
}
-
+
return ids;
}
-
- Set<XYSide> getNeighbors(int ix, int iy, int side)
- {
+
+ Set<XYSide> getNeighbors(int ix, int iy, int side) {
Set<Integer> xneighbors = getXNeighbors(ix);
Set<Integer> yneighbors = getYNeighbors(iy);
-
+
Set<XYSide> neighbors = new HashSet<XYSide>();
-
- for (Integer jx : xneighbors)
- {
- for (Integer jy : yneighbors)
- {
+
+ for (Integer jx : xneighbors) {
+ for (Integer jy : yneighbors) {
// Filter out self.
- if (jx == ix && jy == iy)
- {
+ if (jx == ix && jy == iy) {
continue;
}
-
- neighbors.add(new XYSide(jx,jy,side));
+
+ neighbors.add(new XYSide(jx, jy, side));
}
}
-
+
return neighbors;
}
-
- Set<Integer> getXNeighbors(int ix)
- {
+
+ Set<Integer> getXNeighbors(int ix) {
Set<Integer> neighbors = new HashSet<Integer>();
-
+
// Add self.
neighbors.add(ix);
-
+
// Left neighbor.
- if (isValidX(ix - 1))
- {
+ if (isValidX(ix - 1)) {
neighbors.add(ix - 1);
- }
- else if (isValidX(ix - 2))
- {
+ } else if (isValidX(ix - 2)) {
neighbors.add(ix - 2);
}
-
+
// Right neighbor.
- if (isValidX(ix + 1))
- {
+ if (isValidX(ix + 1)) {
neighbors.add(ix + 1);
- }
- else if (isValidX(ix + 2))
- {
+ } else if (isValidX(ix + 2)) {
neighbors.add(ix + 2);
- }
-
+ }
+
return neighbors;
}
-
- Set<Integer> getYNeighbors(int iy)
- {
+
+ Set<Integer> getYNeighbors(int iy) {
Set<Integer> neighbors = new HashSet<Integer>();
-
+
// Add self.
neighbors.add(iy);
-
+
// Lower neighbor.
- if (isValidY(iy - 1))
- {
+ if (isValidY(iy - 1)) {
neighbors.add(iy - 1);
}
// Upper neighbor.
- if (isValidY(iy + 1))
- {
+ if (isValidY(iy + 1)) {
neighbors.add(iy + 1);
}
-
+
return neighbors;
}
-
- boolean isValidY(int iy)
- {
+
+ boolean isValidY(int iy) {
// Zero is not valid because ID scheme goes from 1.
return iy > 0 && iy <= ny;
}
-
- boolean isValidX(int ix)
- {
+
+ boolean isValidX(int ix) {
// Even case.
- if (!oddX)
- {
- return ix >= -nx/2 && ix <= nx/2 && ix != 0;
+ if (!oddX) {
+ return ix >= -nx / 2 && ix <= nx / 2 && ix != 0;
}
// Odd case.
- else
- {
- return ix >= (-nx-1)/2 && ix <= (nx+1)/2;
- }
- }
-
+ else {
+ return ix >= (-nx - 1) / 2 && ix <= (nx + 1) / 2;
+ }
+ }
+
/**
* Create a map of crystal IDs to the <code>Set</code> of neighbor crystal IDs.
+ *
* @return A map of neighbors for each crystal ID.
*/
- public NeighborMap getNeighborMap()
- {
- if (neighborMap != null)
- {
+ public NeighborMap getNeighborMap() {
+ if (neighborMap != null) {
return neighborMap;
}
-
- // Setup the private instance of the map.
+
+ // Setup the private instance of the map.
neighborMap = new NeighborMap(this.getDetectorElement().getIdentifierHelper());
-
+
IDDecoder dec = getIDDecoder();
IDEncoder enc = new IDEncoder(dec.getIDDescription());
-
+
int nfields = dec.getFieldCount();
int[] vals = new int[nfields];
vals[dec.getFieldIndex("system")] = getSystemID();
-
+
int idxx = dec.getFieldIndex("ix");
int idxy = dec.getFieldIndex("iy");
-
- int hnx = nx;
-
- // Calculate number of X for loop. (from LCDD conv)
- if (oddX)
- {
+
+ int hnx = nx;
+
+ // Calculate number of X for loop. (from LCDD conv)
+ if (oddX) {
hnx -= 1;
hnx /= 2;
- }
- else
- {
+ } else {
hnx /= 2;
}
-
- for (int side=-1; side <=1; side++)
- {
- if (side == 0) continue;
+
+ for (int side = -1; side <= 1; side++) {
+ if (side == 0)
+ continue;
vals[dec.getFieldIndex("side")] = side;
// Loop over y.
- for (int iy=1; iy<=ny; iy++)
- {
+ for (int iy = 1; iy <= ny; iy++) {
// Loop over x.
- for (int ix=0; ix<=hnx; ix++)
- {
+ for (int ix = 0; ix <= hnx; ix++) {
// Loop for positive and negative x.
- for (int j=-1; j<=1; j++)
- {
+ for (int j = -1; j <= 1; j++) {
if (j == 0)
continue;
-
- vals[idxx] = ix*j;
+
+ vals[idxx] = ix * j;
vals[idxy] = iy;
-
+
Long id = enc.setValues(vals);
Set<Long> neighbors = getNeighbors(id);
-
+
neighborMap.put(id, neighbors);
}
}
}
}
-
+
return neighborMap;
- }
+ }
}
Modified: java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java
=============================================================================
--- java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java (original)
+++ java/branches/jeremy-dev2/detector-model/src/main/java/org/lcsim/geometry/subdetector/HPSEcal3.java Mon Mar 21 18:04:10 2016
@@ -150,7 +150,7 @@
/**
* The number of crystals in X in one section.
*
- * @return
+ * @return the number of crystals in X in one section
*/
public double nx() {
return nx;
@@ -159,7 +159,7 @@
/**
* The number of crystals in y in one section.
*
- * @return
+ * @return the number of crystals in Y in one section
*/
public double ny() {
return ny;
Modified: java/branches/jeremy-dev2/distribution/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/distribution/pom.xml (original)
+++ java/branches/jeremy-dev2/distribution/pom.xml Mon Mar 21 18:04:10 2016
@@ -13,7 +13,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/distribution/</url>
Modified: java/branches/jeremy-dev2/ecal-event-display/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/ecal-event-display/pom.xml (original)
+++ java/branches/jeremy-dev2/ecal-event-display/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-event-display/</url>
Modified: java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java (original)
+++ java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/io/TextManager.java Mon Mar 21 18:04:10 2016
@@ -53,7 +53,7 @@
/**
* Initializes an event manager that will read from the indicated file.
- * @param filename - The path to the file containing hit information.
+ * @param file - The path to the file containing hit information.
*/
public TextManager(File file) throws IOException {
reader = new AdvancedReader(file);
Modified: java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java (original)
+++ java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/ActiveViewer.java Mon Mar 21 18:04:10 2016
@@ -34,8 +34,6 @@
* events from the indicated data source with additional status
* fields defined by the <code>fieldNames</code> argument.
* @param em - The data source event manager.
- * @param fieldNames - An array of additional status fields
- * that should be displayed.
*/
public ActiveViewer(EventManager em) {
// Pass any additional field values to the super class.
Modified: java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java (original)
+++ java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/CalorimeterPanel.java Mon Mar 21 18:04:10 2016
@@ -503,8 +503,8 @@
/**
* Determines if the crystal at the given coordinates is a active
* or not.
- * @param xCoor - The x-index of the crystal.
- * @param yCoor - The y-index of the crystal.
+ * @param ix - The x-index of the crystal.
+ * @param iy - The y-index of the crystal.
* @return Returns <code>true</code> if the crystal is active
* and <code>false</code> if it is not.
* @throws IndexOutOfBoundsException Occurs when either of the given
@@ -740,28 +740,28 @@
/**
* Sets whether to mirror the x-axis on the calorimeter display.
- * @param state - <code>true</code> indicates that the axis should
+ * @param mirrorX - <code>true</code> indicates that the axis should
* be mirrored and <code>false</code> that it should not.
*/
public void setMirrorX(boolean mirrorX) {
- // Process the change.
- setMirror(mirrorX, mirrorY);
-
- // Throw an event.
- throwSettingsEvent(SettingsEvent.PROPERTY_X_ORIENTATION);
+ // Process the change.
+ setMirror(mirrorX, mirrorY);
+
+ // Throw an event.
+ throwSettingsEvent(SettingsEvent.PROPERTY_X_ORIENTATION);
}
/**
* Sets whether to mirror the y-axis on the calorimeter display.
- * @param state - <code>true</code> indicates that the axis should
+ * @param mirrorY - <code>true</code> indicates that the axis should
* be mirrored and <code>false</code> that it should not.
*/
public void setMirrorY(boolean mirrorY) {
- // Process the change.
- setMirror(mirrorX, mirrorY);
-
- // Throw an event.
- throwSettingsEvent(SettingsEvent.PROPERTY_Y_ORIENTATION);
+ // Process the change.
+ setMirror(mirrorX, mirrorY);
+
+ // Throw an event.
+ throwSettingsEvent(SettingsEvent.PROPERTY_Y_ORIENTATION);
}
/**
Modified: java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java (original)
+++ java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PDataEventViewer.java Mon Mar 21 18:04:10 2016
@@ -57,7 +57,6 @@
* Initializes a new <code>DataFileViewer</code> that reads from
* the given event manager for event data and the given hardware
* data file for crystal hardware data readout.
- * @param dataSource - The manager for event data.
* @param crystalDataFilePath - The data file for crystal hardware
* information.
* @throws IOException Occurs if there is an error reading from
Modified: java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java (original)
+++ java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/PassiveViewer.java Mon Mar 21 18:04:10 2016
@@ -16,7 +16,7 @@
/**
* Adds a new hit to the display.
- * @param hit - The hit to be added.
+ * @param lcioHit - The hit to be added.
*/
public abstract void addHit(CalorimeterHit lcioHit);
Modified: java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java (original)
+++ java/branches/jeremy-dev2/ecal-event-display/src/main/java/org/hps/monitoring/ecal/eventdisplay/ui/Viewer.java Mon Mar 21 18:04:10 2016
@@ -122,8 +122,6 @@
/**
* Initializes the viewer window and calorimeter panel.
- * @param statusFields - Additional fields to display in the status
- * panel. This can not be <code>null</code>.
* @throws NullPointerException Occurs if any of the additional field
* arguments are <code>null</code>.
**/
Modified: java/branches/jeremy-dev2/ecal-readout-sim/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/ecal-readout-sim/pom.xml (original)
+++ java/branches/jeremy-dev2/ecal-readout-sim/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-readout-sim/</url>
Modified: java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java (original)
+++ java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCEcalReadoutDriver.java Mon Mar 21 18:04:10 2016
@@ -310,7 +310,7 @@
/**
* Return the map of preamp signal buffers. For debug only.
*
- * @return
+ * @return the map of preamp signal buffers
*/
public Map<Long, RingBuffer> getSignalMap() {
return analogPipelines;
@@ -319,7 +319,7 @@
/**
* Return the map of FADC pipelines. For debug only.
*
- * @return
+ * @return the map of FADC pipelines
*/
public Map<Long, FADCPipeline> getPipelineMap() {
return digitalPipelines;
Modified: java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java (original)
+++ java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCPrimaryTriggerDriver.java Mon Mar 21 18:04:10 2016
@@ -368,7 +368,7 @@
* Sets the maximum deviation from coplanarity that a cluster pair
* may possess and still pass the coplanarity pair cut. Value uses
* units of degrees.
- * @param maxCoplanarityAngle - The parameter value.
+ * @param coplanarityHigh - The parameter value.
*/
public void setCoplanarityHigh(double coplanarityHigh) {
triggerModule.setCutValue(TriggerModule.PAIR_COPLANARITY_HIGH, coplanarityHigh);
@@ -407,7 +407,7 @@
* Sets the lowest allowed energy a cluster pair may have and
* still pass the cluster pair energy sum cluster cut. Value uses
* units of GeV.
- * @param energySumHigh - The parameter value.
+ * @param energySumLow - The parameter value.
*/
public void setEnergySumLow(double energySumLow) {
triggerModule.setCutValue(TriggerModule.PAIR_ENERGY_SUM_LOW, energySumLow * EcalUtils.GeV);
Modified: java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java (original)
+++ java/branches/jeremy-dev2/ecal-readout-sim/src/main/java/org/hps/readout/ecal/FADCTriggerDriver.java Mon Mar 21 18:04:10 2016
@@ -532,7 +532,6 @@
/**
* Get a list of all unique cluster pairs in the event
*
- * @param ecalClusters : List of ECal clusters
* @return list of cluster pairs
*/
protected List<Cluster[]> getClusterPairsTopBot() {
@@ -584,7 +583,7 @@
* Checks if the ECal clusters making up a cluster pair both have at least
* the minimum number of hits.
*
- * @param clusterPair: pair of clusters
+ * @param clusterPair the pair of clusters
* @return true if pair passes cut, false if fail
*/
protected boolean clusterHitCount(Cluster[] clusterPair) {
Modified: java/branches/jeremy-dev2/ecal-recon/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/pom.xml (original)
+++ java/branches/jeremy-dev2/ecal-recon/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/ecal-recon/</url>
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalCalibrationsDriver.java Mon Mar 21 18:04:10 2016
@@ -35,7 +35,7 @@
import org.lcsim.util.aida.AIDA;
/**
- * This Driver will generate a {@link org.hps.conditions.EcalCalibration} collection
+ * This Driver will generate a {@link org.hps.conditions.ecal.EcalCalibration} collection
* from the ADC value distributions of raw ECAL data. It may optionally insert this
* information into the conditions database using the file's run number.
*
@@ -46,20 +46,20 @@
*/
public class EcalCalibrationsDriver extends Driver {
- EcalConditions ecalConditions = null;
- DatabaseConditionsManager conditionsManager = null;
- AIDA aida = AIDA.defaultInstance();
- IFunctionFactory functionFactory = aida.analysisFactory().createFunctionFactory(null);
- IFitFactory fitFactory = aida.analysisFactory().createFitFactory();
- boolean loadCalibrations = false;
- boolean performFit = true;
- Integer runStart = null;
- Integer runEnd = null;
- File outputFile = null;
- Set<Integer> runs = new HashSet<Integer>();
- static DecimalFormat decimalFormat = new DecimalFormat("#.####");
- String inputHitsCollectionName = "EcalReadoutHits";
- static String ECAL_CALIBRATIONS = "ecal_calibrations";
+ private EcalConditions ecalConditions = null;
+ private DatabaseConditionsManager conditionsManager = null;
+ private AIDA aida = AIDA.defaultInstance();
+ private IFunctionFactory functionFactory = aida.analysisFactory().createFunctionFactory(null);
+ private IFitFactory fitFactory = aida.analysisFactory().createFitFactory();
+ private boolean loadCalibrations = false;
+ private boolean performFit = true;
+ private Integer runStart = null;
+ private Integer runEnd = null;
+ private File outputFile = null;
+ private Set<Integer> runs = new HashSet<Integer>();
+ private static DecimalFormat DECIMAL_FORMAT = new DecimalFormat("#.####");
+ private String inputHitsCollectionName = "EcalReadoutHits";
+ private static String ECAL_CALIBRATIONS = "ecal_calibrations";
/**
* Set the RawTrackerHit collection of hits to be used for the calibration.
@@ -91,7 +91,7 @@
/**
* Set the end run number for the conditions record.
* It must be >= the runEnd.
- * @param runStart The run start number.
+ * @param runEnd The run end number.
*/
public void setRunEnd(int runEnd) {
if (runEnd < 0) {
@@ -216,8 +216,8 @@
}
// Truncate to 4 decimal places.
- mean = Double.valueOf(decimalFormat.format(mean));
- sigma = Double.valueOf(decimalFormat.format(sigma));
+ mean = Double.valueOf(DECIMAL_FORMAT.format(mean));
+ sigma = Double.valueOf(DECIMAL_FORMAT.format(sigma));
// Create a new calibration object and add it to the collection, using mean for pedestal
// and sigma for noise.
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/EcalRawConverterDriver.java Mon Mar 21 18:04:10 2016
@@ -404,18 +404,17 @@
/**
* @return false if the channel is a good one, true if it is a bad one
- * @param CalorimeterHit
+ * @param hit the <code>CalorimeterHit</code> pointing to the channel
*/
public boolean isBadCrystal(CalorimeterHit hit) {
// Get the channel data.
EcalChannelConstants channelData = findChannel(hit.getCellID());
-
return channelData.isBadChannel();
}
/**
* @return false if the ADC is a good one, true if it is a bad one
- * @param CalorimeterHit
+ * @param hit the <code>CalorimeterHit</code> pointing to the FADC
*/
public boolean isBadFADC(CalorimeterHit hit) {
return (getCrate(hit.getCellID()) == 1 && getSlot(hit.getCellID()) == 3);
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/AbstractClusterer.java Mon Mar 21 18:04:10 2016
@@ -16,7 +16,7 @@
/**
* This is an abstract class that {@link Clusterer} classes should implement
* to perform a clustering algorithm on a <code>CalorimeterHit</code> collection.
- * The sub-class should implement {@link #createClusters(List)} which is
+ * The sub-class should implement {@link #createClusters(EventHeader, List)} which is
* the method that should perform the clustering algorithm.
*
* @see Clusterer
@@ -69,8 +69,8 @@
/**
* This is the primary method for sub-classes to implement their clustering algorithm.
- * @param hits
- * @return
+ * @param hits the list of hits
+ * @return the list of created clusters
*/
public abstract List<Cluster> createClusters(EventHeader event, List<CalorimeterHit> hits);
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterDriver.java Mon Mar 21 18:04:10 2016
@@ -73,7 +73,7 @@
/**
* Set the name of the input CalorimeterHit collection to use for clustering.
- * @param inputHitcollectionName The name of the input hit collection.
+ * @param inputHitCollectionName The name of the input hit collection.
*/
public void setInputHitCollectionName(String inputHitCollectionName) {
this.inputHitCollectionName = inputHitCollectionName;
@@ -128,7 +128,7 @@
* This will use a factory method which first tries to use some hard-coded names from
* the cluster package. As a last resort, it will interpret the name as a canonical
* class name and try to instantiate it using the Class API.
- * @param The name or canonical class name of the Clusterer.
+ * @param name The name or canonical class name of the Clusterer.
*/
public void setClustererName(String name) {
clusterer = ClustererFactory.create(name);
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterEnergyCorrection.java Mon Mar 21 18:04:10 2016
@@ -5,161 +5,204 @@
import org.hps.detector.ecal.EcalCrystal;
import org.hps.detector.ecal.HPSEcalDetectorElement;
import org.jdom.DataConversionException;
-//import org.hps.recon.tracking.TrackUtils;
+// import org.hps.recon.tracking.TrackUtils;
import org.lcsim.event.Cluster;
import org.lcsim.event.base.BaseCluster;
import org.lcsim.geometry.subdetector.HPSEcal3;
/**
- * This is the cluster energy correction requiring the particle id
- * uncorrected cluster energy. This is now updated to include edge
- * corrections and sampling fractions derived from data.
+ * This is the cluster energy correction requiring the particle id uncorrected
+ * cluster energy. This is now updated to include edge corrections and sampling
+ * fractions derived from data.
*
* @author Holly Vance <[log in to unmask]>
* @author Jeremy McCormick <[log in to unmask]>
*/
public final class ClusterEnergyCorrection {
-
+
// Variables for electron energy corrections.
static final double par0_em = -0.017;
- static final double par1_em[] = {35,-0.06738,-0.0005613,16.42,0.3431,-2.021,74.85,-0.3626};
- static final double par2_em[] = {35, 0.933, 0.003234, 18.06, 0.24, 8.586, 75.08, -0.39};
+ static final double par1_em[] = { 35, -0.06738, -0.0005613, 16.42, 0.3431,
+ -2.021, 74.85, -0.3626 };
+ static final double par2_em[] = { 35, 0.933, 0.003234, 18.06, 0.24, 8.586,
+ 75.08, -0.39 };
// Variables for positron energy corrections.
static final double par0_ep = -0.0131;
- static final double par1_ep[] = {35,-0.076,-0.0008183,17.88,0.2886,-1.192,73.12,-0.3747};
- static final double par2_ep[] = {35, 0.94, 0.003713, 18.19, 0.24, 8.342, 72.44, -0.39};
-
+ static final double par1_ep[] = { 35, -0.076, -0.0008183, 17.88, 0.2886,
+ -1.192, 73.12, -0.3747 };
+ static final double par2_ep[] = { 35, 0.94, 0.003713, 18.19, 0.24, 8.342,
+ 72.44, -0.39 };
+
// Variables for photon energy corrections.
static final double par0_p = -0.0113;
- static final double par1_p[] = {35,-0.0585,-0.0008572,16.76,0.2784,-0.07232,72.88,-0.1685};
- static final double par2_p[] = {35, 0.9307, 0.004, 18.05, 0.23, 3.027, 74.93, -0.34};
-
+ static final double par1_p[] = { 35, -0.0585, -0.0008572, 16.76, 0.2784,
+ -0.07232, 72.88, -0.1685 };
+ static final double par2_p[] = { 35, 0.9307, 0.004, 18.05, 0.23, 3.027,
+ 74.93, -0.34 };
+
/**
* Calculate the corrected energy for the cluster.
- * @param cluster The input cluster.
+ *
+ * @param cluster
+ * The input cluster.
* @return The corrected energy.
*/
public static double calculateCorrectedEnergy(HPSEcal3 ecal, Cluster cluster) {
double rawE = cluster.getEnergy();
- return computeCorrectedEnergy(ecal, cluster.getParticleId(), rawE, cluster.getPosition()[0], cluster.getPosition()[1]);
- }
-
- /**
- * Calculate the corrected energy for the cluster using track position at ecal.
- * @param cluster The input cluster.
+ return computeCorrectedEnergy(ecal, cluster.getParticleId(), rawE,
+ cluster.getPosition()[0], cluster.getPosition()[1]);
+ }
+
+ /**
+ * Calculate the corrected energy for the cluster using track position at
+ * ecal.
+ *
+ * @param cluster
+ * The input cluster.
* @return The corrected energy.
*/
- public static double calculateCorrectedEnergy(HPSEcal3 ecal, Cluster cluster, double ypos) {
+ public static double calculateCorrectedEnergy(HPSEcal3 ecal,
+ Cluster cluster, double ypos) {
double rawE = cluster.getEnergy();
- return computeCorrectedEnergy(ecal, cluster.getParticleId(), rawE, cluster.getPosition()[0], ypos);
- }
-
+ return computeCorrectedEnergy(ecal, cluster.getParticleId(), rawE,
+ cluster.getPosition()[0], ypos);
+ }
+
/**
* Calculate the corrected energy and set on the cluster.
- * @param cluster The input cluster.
+ *
+ * @param cluster
+ * The input cluster.
*/
public static void setCorrectedEnergy(HPSEcal3 ecal, BaseCluster cluster) {
double correctedEnergy = calculateCorrectedEnergy(ecal, cluster);
cluster.setEnergy(correctedEnergy);
}
-
+
/**
* Calculate the corrected energy and set on the cluster.
- * @param cluster The input cluster.
- */
-
- public static void setCorrectedEnergy(HPSEcal3 ecal, BaseCluster cluster, double ypos) {
+ *
+ * @param cluster
+ * The input cluster.
+ */
+
+ public static void setCorrectedEnergy(HPSEcal3 ecal, BaseCluster cluster,
+ double ypos) {
double correctedEnergy = calculateCorrectedEnergy(ecal, cluster, ypos);
cluster.setEnergy(correctedEnergy);
}
-
- /**
- * Calculates energy correction based on cluster raw energy and particle type as per
- * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
- * @param pdg Particle id as per PDG
- * @param rawEnergy Raw Energy of the cluster (sum of hits with shared hit distribution)
+
+ /**
+ * Calculates energy correction based on cluster raw energy and particle
+ * type as per <a href=
+ * "https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014"
+ * >HPS Note 2014-001</a>
+ *
+ * @param pdg
+ * Particle id as per PDG
+ * @param rawEnergy
+ * Raw Energy of the cluster (sum of hits with shared hit
+ * distribution)
* @return Corrected Energy
- */
-
- private static double computeCorrectedEnergy(HPSEcal3 ecal, int pdg, double rawEnergy, double xpos, double ypos) {
- //distance to beam gap edge
+ */
+
+ private static double computeCorrectedEnergy(HPSEcal3 ecal, int pdg,
+ double rawEnergy, double xpos, double ypos) {
+ // distance to beam gap edge
double r;
- //Get these values from the Ecal geometry:
- HPSEcalDetectorElement detElement = (HPSEcalDetectorElement) ecal.getDetectorElement();
-// double BEAMGAPTOP = 22.3;//ecal.getNode().getChild("layout").getAttribute("beamgapTop").getDoubleValue();//mm
- double BEAMGAPTOP=20.0;
+ // Get these values from the Ecal geometry:
+ HPSEcalDetectorElement detElement = (HPSEcalDetectorElement) ecal
+ .getDetectorElement();
+ // double BEAMGAPTOP =
+ // 22.3;//ecal.getNode().getChild("layout").getAttribute("beamgapTop").getDoubleValue();//mm
+ double BEAMGAPTOP = 20.0;
try {
- BEAMGAPTOP = ecal.getNode().getChild("layout").getAttribute("beamgapTop").getDoubleValue();
+ BEAMGAPTOP = ecal.getNode().getChild("layout")
+ .getAttribute("beamgapTop").getDoubleValue();
} catch (DataConversionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
- }//mm
- double BEAMGAPBOT=-20.0;
+ }// mm
+ double BEAMGAPBOT = -20.0;
try {
- BEAMGAPBOT = -ecal.getNode().getChild("layout").getAttribute("beamgapBottom").getDoubleValue();
+ BEAMGAPBOT = -ecal.getNode().getChild("layout")
+ .getAttribute("beamgapBottom").getDoubleValue();
} catch (DataConversionException e) {
// TODO Auto-generated catch block
e.printStackTrace();
- }//mm
- double BEAMGAPTOPC = BEAMGAPTOP + 13.0;//mm
- double BEAMGAPBOTC = BEAMGAPBOT - 13.0;//mm
+ }// mm
+ double BEAMGAPTOPC = BEAMGAPTOP + 13.0;// mm
+ double BEAMGAPBOTC = BEAMGAPBOT - 13.0;// mm
// x-coordinates of crystals on either side of row 1 cut out
EcalCrystal crystalM = detElement.getCrystal(-11, 1);
Hep3Vector posM = crystalM.getPositionFront();
EcalCrystal crystalP = detElement.getCrystal(-1, 1);
Hep3Vector posP = crystalP.getPositionFront();
-
- if ((xpos<posM.x())||(xpos>posP.x())){
- if (ypos>0){
- r = Math.abs(ypos-BEAMGAPTOP);}
- else{
- r = Math.abs(ypos-BEAMGAPBOT);}
+
+ if ((xpos < posM.x()) || (xpos > posP.x())) {
+ if (ypos > 0) {
+ r = Math.abs(ypos - BEAMGAPTOP);
+ } else {
+ r = Math.abs(ypos - BEAMGAPBOT);
+ }
}
// crystals above row 1 cut out
else {
- if (ypos>0){
- if (ypos>(par1_em[0]+BEAMGAPTOP)){
- r = Math.abs(ypos-BEAMGAPTOP);}
- else{
- r = Math.abs(ypos-BEAMGAPTOPC);}
- }
- else {
- if (ypos>(-par1_em[0]+BEAMGAPBOT)){
- r = Math.abs(ypos-BEAMGAPBOTC);}
- else {
- r = Math.abs(ypos-BEAMGAPBOT);}
+ if (ypos > 0) {
+ if (ypos > (par1_em[0] + BEAMGAPTOP)) {
+ r = Math.abs(ypos - BEAMGAPTOP);
+ } else {
+ r = Math.abs(ypos - BEAMGAPTOPC);
+ }
+ } else {
+ if (ypos > (-par1_em[0] + BEAMGAPBOT)) {
+ r = Math.abs(ypos - BEAMGAPBOTC);
+ } else {
+ r = Math.abs(ypos - BEAMGAPBOT);
+ }
}
}
-
- switch(pdg) {
- case 11:
- // electron
- return computeCorrectedEnergy(r, rawEnergy, par0_em, par1_em, par2_em);
- case -11:
- // positron
- return computeCorrectedEnergy(r, rawEnergy, par0_ep, par1_ep, par2_ep);
- case 22:
- // photon
- return computeCorrectedEnergy(r, rawEnergy, par0_p, par1_p, par2_p);
- default:
- // unknown
- return rawEnergy;
+
+ //Eliminates corrections at outermost edges to negative cluster energies
+ //66 for positrons, 69 is safe for electrons and photons
+ if (r > 66) {r = 66;}
+
+ switch (pdg) {
+ case 11:
+ // electron
+ return computeCorrectedEnergy(r, rawEnergy, par0_em, par1_em,
+ par2_em);
+ case -11:
+ // positron
+ return computeCorrectedEnergy(r, rawEnergy, par0_ep, par1_ep,
+ par2_ep);
+ case 22:
+ // photon
+ return computeCorrectedEnergy(r, rawEnergy, par0_p, par1_p, par2_p);
+ default:
+ // unknown
+ return rawEnergy;
}
}
-
- /**
- * Calculates the energy correction to a cluster given the variables from the fit as per
- * <a href="https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014">HPS Note 2014-001</a>
- * Note that this is correct as there is a typo in the formula print in the note.
- * @param rawEnergy Raw energy of the cluster
+
+ /**
+ * Calculates the energy correction to a cluster given the variables from
+ * the fit as per <a href=
+ * "https://misportal.jlab.org/mis/physics/hps_notes/index.cfm?note_year=2014"
+ * >HPS Note 2014-001</a>. Note that this is correct, as there is a typo in
+ * the formula printed in the note.
+ *
+ * @param rawEnergy
+ * Raw energy of the cluster
* @param A,B,C from fitting in note
* @return Corrected Energy
- */
- private static double computeCorrectedEnergy(double y, double rawEnergy, double varA, double varB[], double varC[]){
- int ii = y<varB[0] ? 2 : 5;
- double corrEnergy = rawEnergy / (varA / rawEnergy + (varB[1]-varB[ii]*Math.exp(-(y-varB[ii+1])*varB[ii+2])) / (Math.sqrt(rawEnergy)) +
- (varC[1]-varC[ii]*Math.exp(-(y-varC[ii+1])*varC[ii+2])));
+ */
+ private static double computeCorrectedEnergy(double y, double rawEnergy,
+ double varA, double varB[], double varC[]) {
+ int ii = y < varB[0] ? 2 : 5;
+ double corrEnergy = rawEnergy/ (varA / rawEnergy+ (varB[1] - varB[ii]* Math.exp(-(y - varB[ii + 1]) * varB[ii + 2]))/ (Math.sqrt(rawEnergy)) +
+ (varC[1] - varC[ii]* Math.exp(-(y - varC[ii + 1]) * varC[ii + 2])));
return corrEnergy;
- }
+ }
}
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterPositionCorrection.java Mon Mar 21 18:04:10 2016
@@ -58,7 +58,7 @@
* @param pdg Particle id as per PDG
* @param xCl Calculated x centroid position of the cluster, uncorrected, at face
* @param rawEnergy Raw energy of the cluster (sum of hits with shared hit distribution)
- * @return Corrected x position
+ * @return the corrected x position
*/
private static double computeCorrectedPosition(int pdg, double xPos, double rawEnergy) {
//double xCl = xPos / 10.0;//convert to cm
@@ -91,7 +91,7 @@
* @param varB1
* @param varB2
* @param varB3
- * @return
+ * @return the cluster position correction
*/
private static double positionCorrection(double xCl, double rawEnergy, double varA1, double varA2, double varB1, double varB2, double varB3) {
//return ((xCl - (varB1 * rawEnergy + varB2 / Math.sqrt(rawEnergy) + varB3))/(varA1 / Math.sqrt(rawEnergy) + varA2 + 1));
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClusterUtilities.java Mon Mar 21 18:04:10 2016
@@ -199,7 +199,7 @@
/**
* Find the unique set of MCParticles that are referenced by the hits of the Cluster.
- * @param clusters The input Cluster.
+ * @param cluster The input Cluster.
* @return The set of unique MCParticles.
*/
public static Set<MCParticle> findMCParticles(Cluster cluster) {
@@ -428,7 +428,7 @@
/**
* Get the set of hits from a list of clusters.
- * @param The input cluster list.
+ * @param clusters The input cluster list.
* @return The list of hits from all the clusters.
*/
public static Set<CalorimeterHit> getHits(List<Cluster> clusters) {
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/ClustererFactory.java Mon Mar 21 18:04:10 2016
@@ -38,7 +38,7 @@
* @param name The name of the clustering algorithm.
* @param cuts The set of cuts (can be null).
* @return The clustering algorithm.
- * @throw IllegalArgumentException if there is no Clusterer found with name.
+ * @throws IllegalArgumentException if there is no Clusterer found with name.
*/
public static Clusterer create(String name, double[] cuts) {
Clusterer clusterer;
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/CopyClusterCollectionDriver.java Mon Mar 21 18:04:10 2016
@@ -111,8 +111,8 @@
/**
* Set to <code>true</code> to store hits in the output clusters.
- *
- * @return <code>true</code> to store hits in the output clusters
+ *
+ * @param storeHits <code>true</code> to store hits; <code>false</code> to not store hits
*/
public void setStoreHits(final boolean storeHits) {
this.storeHits = storeHits;
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/GTPClusterer.java Mon Mar 21 18:04:10 2016
@@ -124,7 +124,7 @@
* forms a collection of <code>Cluster</code> objects according to
* the GTP clustering algorithm.
* @param event - The object containing event data.
- * @param hitList - A list of <code>CalorimeterHit</code> objects
+ * @param hits - A list of <code>CalorimeterHit</code> objects
* from which clusters should be formed.
*/
public List<Cluster> createClusters(EventHeader event, List<CalorimeterHit> hits) {
Modified: java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java
=============================================================================
--- java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java (original)
+++ java/branches/jeremy-dev2/ecal-recon/src/main/java/org/hps/recon/ecal/cluster/SimpleCosmicClusterer.java Mon Mar 21 18:04:10 2016
@@ -125,9 +125,8 @@
}
/**
- * This method takes a list of potential cluster hits and applies selection cuts,
- * returning a new list that has the hit lists which did not pass the cuts removed.
- * @param clusteredHitLists The input hit lists.
+ * Apply selection cuts to cluster list and return filtered list.
+ * @param clusterList The input hit lists.
* @return The hit lists that passed the cuts.
*/
protected List<Cluster> applyCuts(List<Cluster> clusterList) {
Modified: java/branches/jeremy-dev2/evio/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/evio/pom.xml (original)
+++ java/branches/jeremy-dev2/evio/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/evio/</url>
Modified: java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java
=============================================================================
--- java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java (original)
+++ java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/AbstractSvtEvioReader.java Mon Mar 21 18:04:10 2016
@@ -12,12 +12,7 @@
import org.hps.record.svt.SvtEvioExceptions.SvtEvioReaderException;
import org.hps.util.Pair;
import org.jlab.coda.jevio.BaseStructure;
-import org.jlab.coda.jevio.DataType;
import org.jlab.coda.jevio.EvioEvent;
-import org.jlab.coda.jevio.IEvioFilter;
-import org.jlab.coda.jevio.IEvioStructure;
-import org.jlab.coda.jevio.StructureFinder;
-import org.jlab.coda.jevio.StructureType;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
import org.lcsim.event.RawTrackerHit;
@@ -31,7 +26,6 @@
*
* @author Omar Moreno <[log in to unmask]>
* @author Per Hansson Adrian <[log in to unmask]>
- * @date November 20, 2014
*
*/
public abstract class AbstractSvtEvioReader extends EvioReader {
@@ -39,7 +33,7 @@
public static final String SVT_HEADER_COLLECTION_NAME = "SvtHeaders";
// Initialize the logger
- public static Logger LOGGER = Logger.getLogger(AbstractSvtEvioReader.class.getPackage().getName());
+ public static final Logger LOGGER = Logger.getLogger(AbstractSvtEvioReader.class.getPackage().getName());
// A Map from DAQ pair (FPGA/Hybrid or FEB ID/FEB Hybrid ID) to the
// corresponding sensor
@@ -47,16 +41,11 @@
HpsSiSensor /* Sensor */> daqPairToSensor
= new HashMap<Pair<Integer, Integer>, HpsSiSensor>();
- // A collection of banks that should be processed after all hits have been made
- protected List<BaseStructure> eventBanks = new ArrayList<BaseStructure>();
-
// Flag indicating whether the DAQ map has been setup
protected boolean isDaqMapSetup = false;
// Collections and names
private static final String SVT_HIT_COLLECTION_NAME = "SVTRawTrackerHits";
- List<RawTrackerHit> rawHits = new ArrayList<RawTrackerHit>();
- List<SvtHeaderDataInfo> headers = new ArrayList<SvtHeaderDataInfo>();
// Constants
private static final String SUBDETECTOR_NAME = "Tracker";
@@ -76,6 +65,19 @@
*/
abstract protected int getMaxRocBankTag();
+ /**
+ * Get the minimum SVT data bank tag in the event.
+ *
+ * @return Minimum SVT data bank tag
+ */
+ abstract protected int getMinDataBankTag();
+
+ /**
+ * Get the maximum SVT data bank tag in the event.
+ *
+ * @return Maximum SVT data bank tag
+ */
+ abstract protected int getMaxDataBankTag();
/**
* Get the SVT ROC bank number of the bank encapsulating the SVT samples.
@@ -116,14 +118,6 @@
*/
abstract protected HpsSiSensor getSensor(int[] data);
- /**
- * Check whether a data bank is valid i.e. contains SVT samples only.
- *
- * @param dataBank - An EVIO bank containing integer data
- * @return true if the bank is valid, false otherwise
- */
- abstract protected boolean isValidDataBank(BaseStructure dataBank);
-
/**
* Check whether the samples are valid
*
@@ -154,125 +148,88 @@
* @return true if the raw hits were created successfully, false otherwise
* @throws SvtEvioReaderException
*/
+ @Override
public boolean makeHits(EvioEvent event, EventHeader lcsimEvent) throws SvtEvioReaderException {
LOGGER.finest("Physics Event: " + event.toString());
- // Retrieve the ROC banks encapsulated by the physics bank. The ROC
+ // Retrieve the data banks encapsulated by the physics bank. The ROC
// bank range is set in the subclass.
- List<BaseStructure> rocBanks = new ArrayList<BaseStructure>();
- for (int rocBankTag = this.getMinRocBankTag();
- rocBankTag <= this.getMaxRocBankTag(); rocBankTag++) {
-
- LOGGER.finest("Retrieving ROC bank: " + rocBankTag);
- List<BaseStructure> matchingRocBanks = this.getMatchingBanks(event, rocBankTag);
- if (matchingRocBanks == null) {
- LOGGER.finest("ROC bank " + rocBankTag + " was not found!");
- continue;
- }
- rocBanks.addAll(matchingRocBanks);
- }
- LOGGER.finest("Total ROC banks found: " + rocBanks.size());
-
- // Return false if ROC banks weren't found
- if (rocBanks.isEmpty()) return false;
+ List<BaseStructure> dataBanks = SvtEvioUtils.getDataBanks(event, this.getMinRocBankTag(), this.getMaxRocBankTag(), this.getMinDataBankTag(), this.getMaxDataBankTag());
+
+ // Return false if data banks weren't found
+ if (dataBanks.isEmpty()) return false;
// Setup the DAQ map if it's not setup
if (!this.isDaqMapSetup)
this.setupDaqMap(lcsimEvent.getDetector().getSubdetector(
SUBDETECTOR_NAME));
- // Clear the list of raw tracker hits
- rawHits.clear();
-
- // Clear the list of headers
- headers.clear();
-
- // Loop over the SVT ROC banks and process all samples
- for (BaseStructure rocBank : rocBanks) {
-
- LOGGER.finest("ROC bank: " + rocBank.toString());
-
- LOGGER.finest("Processing ROC bank " + rocBank.getHeader().getTag());
-
- // If the ROC bank doesn't contain any data, raise an exception
- if (rocBank.getChildCount() == 0) {
- throw new SvtEvioReaderException("[ " + this.getClass().getSimpleName()
- + " ]: SVT bank doesn't contain any data banks.");
+ List<RawTrackerHit> rawHits = new ArrayList<RawTrackerHit>();
+ List<SvtHeaderDataInfo> headers = new ArrayList<SvtHeaderDataInfo>();
+
+ LOGGER.finest("Total data banks found: " + dataBanks.size());
+
+ // Loop over all of the data banks contained by the ROC banks and
+ // processed them
+ for (BaseStructure dataBank : dataBanks) {
+
+ LOGGER.finest("Processing data bank: " + dataBank.toString());
+
+ // Get the int data encapsulated by the data bank
+ int[] data = dataBank.getIntData();
+ LOGGER.finest("Total number of integers contained by the data bank: " + data.length);
+
+ // Check that a complete set of samples exist
+ int sampleCount = data.length - this.getDataHeaderLength()
+ - this.getDataTailLength();
+ LOGGER.finest("Total number of samples: " + sampleCount);
+ if (sampleCount % 4 != 0) {
+ throw new SvtEvioReaderException("[ "
+ + this.getClass().getSimpleName()
+ + " ]: Size of samples array is not divisible by 4");
}
-
- // Get the data banks containing the SVT samples.
- List<BaseStructure> dataBanks = rocBank.getChildren();
- LOGGER.finest("Total data banks found: " + dataBanks.size());
-
- // Loop over all of the data banks contained by the ROC banks and
- // processed them
- for (BaseStructure dataBank : dataBanks) {
-
- LOGGER.finest("Processing data bank: " + dataBank.toString());
-
- // Check that the bank is valid
- if (!this.isValidDataBank(dataBank)) continue;
-
- // Get the int data encapsulated by the data bank
- int[] data = dataBank.getIntData();
- LOGGER.finest("Total number of integers contained by the data bank: " + data.length);
-
- // Check that a complete set of samples exist
- int sampleCount = data.length - this.getDataHeaderLength()
- - this.getDataTailLength();
- LOGGER.finest("Total number of samples: " + sampleCount);
- if (sampleCount % 4 != 0) {
- throw new SvtEvioReaderException("[ "
- + this.getClass().getSimpleName()
- + " ]: Size of samples array is not divisible by 4");
- }
-
- // extract header and tail information
- SvtHeaderDataInfo headerData = this.extractSvtHeader(dataBank.getHeader().getNumber(), data);
-
- // Check that the multisample count is consistent
- this.checkSvtSampleCount(sampleCount, headerData);
-
- // Add header to list
- headers.add(headerData);
-
-
- // Store the multisample headers
- // Note that the length is not known but can't be longer than the multisample count
- // in other words the data can be only header multisamples for example.
- int multisampleHeaderData[] = new int[sampleCount];
- int multisampleHeaderIndex = 0;
-
- LOGGER.finest("sampleCount " + sampleCount);
-
- // Loop through all of the samples and make hits
- for (int samplesN = 0; samplesN < sampleCount; samplesN += 4) {
-
- int[] samples = new int[4];
- System.arraycopy(data, this.getDataHeaderLength() + samplesN, samples, 0, samples.length);
-
- LOGGER.finest("samplesN " + samplesN + " multisampleHeaderCount " + multisampleHeaderIndex);
- if(SvtEvioUtils.isMultisampleHeader(samples))
- LOGGER.finest("this is a header multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
- else
- LOGGER.finest("this is a data multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
-
-
+
+ // extract header and tail information
+ SvtHeaderDataInfo headerData = this.extractSvtHeader(dataBank.getHeader().getNumber(), data);
+
+ // Check that the multisample count is consistent
+ this.checkSvtSampleCount(sampleCount, headerData);
+
+ // Add header to list
+ headers.add(headerData);
+
+ // Store the multisample headers
+ // Note that the length is not known but can't be longer than the multisample count
+ // in other words the data can be only header multisamples for example.
+ int multisampleHeaderData[] = new int[sampleCount];
+ int multisampleHeaderIndex = 0;
+
+ LOGGER.finest("sampleCount " + sampleCount);
+
+ List<int[]> multisampleList = SvtEvioUtils.getMultisamples(data, sampleCount, this.getDataHeaderLength());
+ // Loop through all of the samples and make hits
+ for (int[] samples:multisampleList) {
+ if (SvtEvioUtils.isMultisampleHeader(samples)) {
+ LOGGER.finest("this is a header multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
// Extract data words from multisample header and update index
multisampleHeaderIndex += this.extractMultisampleHeaderData(samples, multisampleHeaderIndex, multisampleHeaderData);
-
- // If a set of samples is associated with an APV header or tail, skip it
- if (!this.isValidSampleSet(samples)) continue;
- rawHits.add(this.makeHit(samples));
+ } else {
+ LOGGER.finest("this is a data multisample for apv " + SvtEvioUtils.getApvFromMultiSample(samples) + " ch " + SvtEvioUtils.getChannelNumber(samples));
}
-
- LOGGER.finest("got " + multisampleHeaderIndex + " multisampleHeaderIndex for " + sampleCount + " sampleCount");
-
- // add multisample header tails to header data object
- this.setMultiSampleHeaders(headerData, multisampleHeaderIndex, multisampleHeaderData);
-
+
+ // If a set of samples is associated with an APV header or tail, skip it
+ if (!this.isValidSampleSet(samples)) {
+ continue;
+ }
+ rawHits.add(this.makeHit(samples));
}
+
+ LOGGER.finest("got " + multisampleHeaderIndex + " multisampleHeaderIndex for " + sampleCount + " sampleCount");
+
+ // add multisample header tails to header data object
+ this.setMultiSampleHeaders(headerData, multisampleHeaderIndex, multisampleHeaderData);
+
}
LOGGER.finest("Total number of RawTrackerHits created: " + rawHits.size());
@@ -282,13 +239,9 @@
// Add the collection of raw hits to the LCSim event
lcsimEvent.put(SVT_HIT_COLLECTION_NAME, rawHits, RawTrackerHit.class, flag, READOUT_NAME);
-
// Process SVT headers
this.processSvtHeaders(headers, lcsimEvent);
-
-
-
return true;
}
@@ -322,7 +275,7 @@
* @param samples
* @param index
* @param multisampleHeaderData
- * @return
+ * @return the length of the extracted samples or 0 if not a multisample header
*/
protected int extractMultisampleHeaderData(int[] samples, int index, int[] multisampleHeaderData) {
LOGGER.finest("extractMultisampleHeaderData: index " + index);
@@ -394,26 +347,4 @@
// Create and return a RawTrackerHit
return new BaseRawTrackerHit(hitTime, cellID, SvtEvioUtils.getSamples(data), null, sensor);
}
-
- /**
- * Retrieve all the banks in an event that match the given tag in their
- * header and are not data banks.
- *
- * @param structure : The event/bank being queried
- * @param tag : The tag to match
- * @return A collection of all bank structures that pass the filter
- * provided by the event
- */
- protected List<BaseStructure> getMatchingBanks(BaseStructure structure, final int tag) {
- IEvioFilter filter = new IEvioFilter() {
- public boolean accept(StructureType type, IEvioStructure struc) {
- return (type == StructureType.BANK)
- && (tag == struc.getHeader().getTag())
- && (struc.getHeader().getDataType() == DataType.ALSOBANK);
- }
- };
- return StructureFinder.getMatchingStructures(structure, filter);
- }
-
-
}
Modified: java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioReader.java
=============================================================================
--- java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioReader.java (original)
+++ java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioReader.java Mon Mar 21 18:04:10 2016
@@ -18,8 +18,8 @@
protected String hitCollectionName = null;
/**
- * Make a LCIO hit collection (e.g. {@link RawTrackerHit},
- * {@link CalorimeterHit} from raw EVIO data.
+ * Make a LCIO hit collection (e.g. {@link org.lcsim.event.RawTrackerHit},
+ * {@link org.lcsim.event.CalorimeterHit} from raw EVIO data.
*
* @param event : The EVIO event to read the raw data from
* @param lcsimEvent : The LCSim event to write the collections to
Modified: java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioToLcio.java
=============================================================================
--- java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioToLcio.java (original)
+++ java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/EvioToLcio.java Mon Mar 21 18:04:10 2016
@@ -463,8 +463,6 @@
* This method will execute the EVIO to LCIO conversion and optionally process the events with LCSim Drivers from a
* steering file. Then the resultant LCIO events will be written to disk if this option is enabled in the command
* line arguments.
- *
- * @param args The command line arguments.
*/
public void run() {
Modified: java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java
=============================================================================
--- java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java (original)
+++ java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/LCSimEngRunEventBuilder.java Mon Mar 21 18:04:10 2016
@@ -121,10 +121,6 @@
} else {
LOGGER.info("Run manager is not initialized; TI time offset not available.");
}
- try {
- RunManager.getRunManager().closeConnection();
- } catch (Exception e) {
- }
}
/**
Modified: java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/SvtEvioReader.java
=============================================================================
--- java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/SvtEvioReader.java (original)
+++ java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/SvtEvioReader.java Mon Mar 21 18:04:10 2016
@@ -7,7 +7,6 @@
import org.hps.record.svt.SvtEvioExceptions.SvtEvioHeaderException;
import org.hps.record.svt.SvtEvioExceptions.SvtEvioReaderException;
import org.hps.util.Pair;
-import org.jlab.coda.jevio.BaseStructure;
import org.jlab.coda.jevio.EvioEvent;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.event.EventHeader;
@@ -18,8 +17,6 @@
* SVT EVIO reader used to convert SVT bank integer data to LCIO objects.
*
* @author Omar Moreno <[log in to unmask]>
- * @data February 03, 2015
- *
*/
public class SvtEvioReader extends AbstractSvtEvioReader {
@@ -30,6 +27,7 @@
private static final int DATA_TAIL_LENGTH = 1;
public static final int MIN_ROC_BANK_TAG = 51;
public static final int MAX_ROC_BANK_TAG = 66;
+ public static final int DATA_BANK_TAG = 3;
private static final int ROC_BANK_NUMBER = 0;
/**
@@ -52,6 +50,16 @@
return MAX_ROC_BANK_TAG;
}
+ @Override
+ protected int getMinDataBankTag() {
+ return DATA_BANK_TAG;
+ }
+
+ @Override
+ protected int getMaxDataBankTag() {
+ return DATA_BANK_TAG;
+ }
+
/**
* Get the SVT ROC bank number of the bank encapsulating the SVT samples.
*
@@ -127,36 +135,13 @@
}
/**
- * Check whether a data bank is valid i.e. contains SVT samples only. For
- * the engineering run, a valid data bank has a tag of 3.
- *
- * @param dataBank - An EVIO bank containing integer data
- * @return true if the bank is valid, false otherwise
- *
- */
- @Override
- protected boolean isValidDataBank(BaseStructure dataBank) {
-
- // The SVT configuration is stored in a bank with tag equal to 57614.
- // All other event banks are invalid
- if (dataBank.getHeader().getTag() == 57614) {
-
- // Store the event bank for processing later.
- eventBanks.add(dataBank);
-
- return false;
- } else if (dataBank.getHeader().getTag() != 3) return false;
-
- return true;
- }
-
- /**
* Check whether the samples are valid. Specifically, check if the samples
* are APV header or tails.
*
* @param data : sample block of data
* @return true if the samples are valid, false otherwise
*/
+ @Override
protected boolean isValidSampleSet(int[] data) {
return !(SvtEvioUtils.isMultisampleHeader(data) || SvtEvioUtils.isMultisampleTail(data));
}
@@ -191,9 +176,6 @@
}
}*/
- // Clear out the event banks after they have been processed
- eventBanks.clear();
-
return success;
}
Modified: java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java
=============================================================================
--- java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java (original)
+++ java/branches/jeremy-dev2/evio/src/main/java/org/hps/evio/TestRunSvtEvioReader.java Mon Mar 21 18:04:10 2016
@@ -6,7 +6,6 @@
import org.hps.record.svt.SvtEvioUtils;
import org.hps.record.svt.SvtHeaderDataInfo;
import org.hps.util.Pair;
-import org.jlab.coda.jevio.BaseStructure;
import org.lcsim.detector.tracker.silicon.HpsSiSensor;
import org.lcsim.detector.tracker.silicon.HpsTestRunSiSensor;
import org.lcsim.event.EventHeader;
@@ -18,8 +17,6 @@
* objects.
*
* @author Omar Moreno <[log in to unmask]>
- * @date November 20, 2014
- *
*/
public class TestRunSvtEvioReader extends AbstractSvtEvioReader {
@@ -30,6 +27,7 @@
private static final int DATA_HEADER_LENGTH = 7;
private static final int DATA_TAIL_LENGTH = 1;
private static final int MAX_FPGA_ID = 6;
+ public static final int MIN_DATA_BANK_TAG = 0;
private static final int ROC_BANK_TAG = 3;
private static final int ROC_BANK_NUMBER = -1;
@@ -56,6 +54,16 @@
@Override
protected int getMaxRocBankTag() {
return ROC_BANK_TAG;
+ }
+
+ @Override
+ protected int getMinDataBankTag() {
+ return MIN_DATA_BANK_TAG;
+ }
+
+ @Override
+ protected int getMaxDataBankTag() {
+ return MAX_FPGA_ID;
}
/**
@@ -131,26 +139,12 @@
}
/**
- * Check whether a data bank is valid i.e. contains SVT samples only. For
- * the test run, a valid data bank has a tag in the range 0-6.
- *
- * @param dataBank - An EVIO bank containing integer data
- * @return true if the bank is valid, false otherwise
- *
- */
- @Override
- protected boolean isValidDataBank(BaseStructure dataBank) {
- if (dataBank.getHeader().getTag() < 0
- || dataBank.getHeader().getTag() >= MAX_FPGA_ID) return false;
- return true;
- }
-
- /**
* Check whether the samples are valid.
*
* @param data : sample block of data
* @return true if the samples are valid, false otherwise
*/
+ @Override
protected boolean isValidSampleSet(int[] data) {
return true;
}
Modified: java/branches/jeremy-dev2/job/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/job/pom.xml (original)
+++ java/branches/jeremy-dev2/job/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/job/</url>
Modified: java/branches/jeremy-dev2/job/src/main/java/org/hps/job/DatabaseConditionsManagerSetup.java
=============================================================================
--- java/branches/jeremy-dev2/job/src/main/java/org/hps/job/DatabaseConditionsManagerSetup.java (original)
+++ java/branches/jeremy-dev2/job/src/main/java/org/hps/job/DatabaseConditionsManagerSetup.java Mon Mar 21 18:04:10 2016
@@ -107,7 +107,7 @@
/**
* Do cleanup of conditions system after job.
* <p>
- * Shuts down the database connections to the run manager and conditions manager, if necessary.
+ * Shuts down the database connection to the conditions manager.
*/
@Override
public void cleanup() {
@@ -124,11 +124,6 @@
LOGGER.log(Level.WARNING, e.getMessage(), e);
}
- // Close the run manager connection.
- if (enableRunManager) {
- RunManager.getRunManager().closeConnection();
- }
-
LOGGER.config("done cleaning up");
}
}
Modified: java/branches/jeremy-dev2/job/src/main/java/org/hps/job/JobManager.java
=============================================================================
--- java/branches/jeremy-dev2/job/src/main/java/org/hps/job/JobManager.java (original)
+++ java/branches/jeremy-dev2/job/src/main/java/org/hps/job/JobManager.java Mon Mar 21 18:04:10 2016
@@ -2,6 +2,10 @@
import java.util.HashSet;
import java.util.Set;
+
+import org.lcsim.util.Driver;
+
+import org.hps.conditions.ConditionsDriver;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
@@ -70,4 +74,20 @@
}
return commandLine;
}
+
+ /**
+ * Initialize <code>ConditionsDriver</code> if necessary.
+ **/
+ protected void setupDrivers() {
+ super.setupDrivers();
+ for (Driver driver : this.getDriverExecList()) {
+ if (driver instanceof ConditionsDriver) {
+ ConditionsDriver conditions = (ConditionsDriver) driver;
+ getConditionsSetup().setRun(conditions.getRunNumber());
+ getConditionsSetup().setDetectorName(conditions.getDetectorName());
+ break;
+ }
+ }
+ }
+
}
Modified: java/branches/jeremy-dev2/logging/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/logging/pom.xml (original)
+++ java/branches/jeremy-dev2/logging/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/logging/</url>
Modified: java/branches/jeremy-dev2/monitoring-app/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/monitoring-app/pom.xml (original)
+++ java/branches/jeremy-dev2/monitoring-app/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/monitoring-app/</url>
Modified: java/branches/jeremy-dev2/monitoring-drivers/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/monitoring-drivers/pom.xml (original)
+++ java/branches/jeremy-dev2/monitoring-drivers/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/monitoring-drivers/</url>
Modified: java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java
=============================================================================
--- java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java (original)
+++ java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalDaqPlots.java Mon Mar 21 18:04:10 2016
@@ -18,11 +18,12 @@
import org.lcsim.util.aida.AIDA;
/**
- * The driver <code>EcalDaqPlots</code> implements the histogram shown to the user in the fourth tab of the Monitoring Application, when using the Ecal monitoring lcsim file. It contains only a
- * sub-tab, showing the number of hits recorded by the different FADC channels. It is a very preliminary driver to monitor the DAQ status. These plots are updated continuosly.
+ * The driver <code>EcalDaqPlots</code> implements the histogram shown to the user in the fourth tab of the
+ * Monitoring Application, when using the Ecal monitoring lcsim file. It contains only a sub-tab, showing
+ * the number of hits recorded by the different FADC channels. It is a very preliminary driver to monitor
+ * the DAQ status. These plots are updated continuously.
+ *
* @author Andrea Celentano
- * @TODO: integrate with the new conditions system.
- *
*/
public class EcalDaqPlots extends Driver {
Modified: java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java
=============================================================================
--- java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java (original)
+++ java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalLedSequenceMonitor.java Mon Mar 21 18:04:10 2016
@@ -1,4 +1,5 @@
package org.hps.monitoring.ecal.plots;
+
import hep.aida.IEvaluator;
import hep.aida.IFitResult;
@@ -52,6 +53,8 @@
import org.lcsim.util.Driver;
import org.lcsim.util.aida.AIDA;
+
+
/* This is the driver used to determine the response of each calorimeter channel after a LED run
* @author Andrea Celentano <[log in to unmask]>
*/
@@ -67,7 +70,7 @@
String inputCollectionRaw = "EcalReadoutHits";
- String inputCollection = "EcalCalHits";
+ String inputCollection = "EcalCalHits";
AIDA aida;
DatabaseConditionsManager conditionsManager;
@@ -87,7 +90,7 @@
String outFileName;
- private int runNumber = 0;
+ private int runNumber = 0;
private int eventN = 0;
private int id,row,column,chid,ledId,driverId;
private int[][] expectedSequence = new int[][]{ /*A.C. it is a terrible thing to have this hard-coded here!*/
@@ -97,9 +100,9 @@
{112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,-1}, //missing 135 is ok
{168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223},
//second 4 are the flasher2 sequence, BOTTOM controller
- {2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,-1,-1},
+ {2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,-1,-1},
{56,57,58,59,60,61,62,63,64,65,66,67,68,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,-1}, //missing 69 is OK
- {112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167},
+ {112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167},
{168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223},
};
private int[][] actualSequence=new int[nDrivers][nSteps];
@@ -126,6 +129,7 @@
private IFunction fFunction,fFunction1;
private IProfile1D cProfile;
private IHistogram2D hMeanCharge2D;
+ private IHistogram2D hMeanCharge2DReferenceRatio;
private ArrayList<IHistogram1D> hCharge;
private ArrayList<IHistogram2D> hChargeVsEvn;
private IPlotterFactory factory;
@@ -148,6 +152,11 @@
private double fEvnMaxDraw=80000.;
private double fChargeMinDraw=0.;
private double fChargeMaxDraw=100.;
+
+ /*The reference run numbers*/
+ private int fRedReferenceID;
+ private int fBlueReferenceID;
+
/*Components for user interaction*/
private JDialog dialog;
@@ -160,6 +169,14 @@
private LedColor m_ret=LedColor.UNKNOWN; //use UNKNONW as CANCEL button
static Object modalMonitor = new Object();
+ public void setRedReferenceID(int redReference){
+ this.fRedReferenceID=redReference;
+ }
+
+ public void setBlueReferenceID(int blueReference){
+ this.fBlueReferenceID=blueReference;
+ }
+
public void setUseRawEnergy(boolean useRawEnergy) {
this.useRawEnergy=useRawEnergy;
}
@@ -217,7 +234,7 @@
conditionsManager = DatabaseConditionsManager.getInstance();
LedTopMap = new HashMap< Integer , Integer >(); //key: ecal channel ID. Value: led id
- LedBotMap = new HashMap< Integer , Integer >();
+ LedBotMap = new HashMap< Integer , Integer >();
LedTopMapInverted = new HashMap< Integer , Integer >(); //key: led id. Value: ecal channel id
LedBotMapInverted = new HashMap< Integer , Integer >();
@@ -225,21 +242,21 @@
ChannelCollection = conditionsManager.getCachedConditions(EcalChannelCollection.class, "ecal_channels").getCachedData();
LedCollection = conditionsManager.getCachedConditions(EcalLedCollection.class, "ecal_leds").getCachedData();
- ecalConditions = conditionsManager.getEcalConditions();
+ ecalConditions = conditionsManager.getEcalConditions();
for (EcalChannel channel : ChannelCollection){
chid = channel.getChannelId();
- for (EcalLed Led : LedCollection) {
- if (Led.getEcalChannelId()==chid){
- if (channel.getY()>0){
- LedTopMap.put( chid , Led.getLedNumber() );
- LedTopMapInverted.put( Led.getLedNumber(), chid );
- }
- else if (channel.getY()<0){
- LedBotMap.put( chid , Led.getLedNumber() );
- LedBotMapInverted.put( Led.getLedNumber(), chid );
- }
- }
+ for (EcalLed Led : LedCollection) {
+ if (Led.getEcalChannelId()==chid){
+ if (channel.getY()>0){
+ LedTopMap.put( chid , Led.getLedNumber() );
+ LedTopMapInverted.put( Led.getLedNumber(), chid );
+ }
+ else if (channel.getY()<0){
+ LedBotMap.put( chid , Led.getLedNumber() );
+ LedBotMapInverted.put( Led.getLedNumber(), chid );
+ }
+ }
}
}
@@ -249,14 +266,16 @@
aida = AIDA.defaultInstance();
aida.tree().cd("/");
hMeanCharge2D = aida.histogram2D("Average LED response", 47, -23.5, 23.5, 11, -5.5, 5.5);
-
+ hMeanCharge2DReferenceRatio = aida.histogram2D("Ratio this run VS reference run", 47, -23.5, 23.5, 11, -5.5, 5.5);
+
factory= aida.analysisFactory().createPlotterFactory("Ecal Led Sequence");
pPlotter= factory.create("Drivers");
pPlotter.createRegions(4,2);
if (isMonitoringApp){
pPlotter2=factory.create("Sequence Map");
- pPlotter2.createRegions(1,1);
+ pPlotter2.createRegions(1,2);
pPlotter2.region(0).plot(hMeanCharge2D);
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
}
iTuple = new ArrayList<ITuple>(NUM_CHANNELS);
hCharge = new ArrayList<IHistogram1D>(NUM_CHANNELS);
@@ -268,7 +287,7 @@
for (int ii=0;ii<NUM_CHANNELS;ii++){
int row = EcalMonitoringUtilities.getRowFromHistoID(ii);
- int column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
+ int column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
iTuple.add(aida.analysisFactory().createTupleFactory(aida.tree()).create("nTuple"+ii,"nTuple"+ii,"int fEvn=0 , double fCharge=0.,double fTime=0.",""));
}
@@ -280,7 +299,7 @@
pPlotter.show();
if (isMonitoringApp) pPlotter2.show();
- }
+ }
@Override
public void process(EventHeader event) {
@@ -291,83 +310,83 @@
List<CalorimeterHit> hits = event.get(CalorimeterHit.class, inputCollection);
for (CalorimeterHit hit : hits) {
- column = hit.getIdentifierFieldValue("ix");
- row = hit.getIdentifierFieldValue("iy");
- id = EcalMonitoringUtilities.getHistoIDFromRowColumn(row, column);
- cellID=hit.getCellID();
- chid = ChannelCollection.findGeometric(cellID).getChannelId();
-
- energy = hit.getCorrectedEnergy();
-
- if (useRawEnergy){
- fillEnergy = getRawADCSum(energy,cellID);
- }
- else {
- fillEnergy = energy;
- }
- fillTime = hit.getTime();
-
-
- //find the LED
- if (row>0){
- ledId=LedTopMap.get(chid);
- }
- else if (row<0){
- ledId=LedBotMap.get(chid);
- }
- driverId=getDriver(ledId);
- if (row<0) driverId+=4;
-
-
-
- /*Skip the events under thr*/
- if (energy<energyCut) continue;
-
- /*First, check if this led is the one in the NEXT step. Therefore, increment by 1 the step*/
- /*
- * if (iStep[driverId]==0){
-
+ column = hit.getIdentifierFieldValue("ix");
+ row = hit.getIdentifierFieldValue("iy");
+ id = EcalMonitoringUtilities.getHistoIDFromRowColumn(row, column);
+ cellID=hit.getCellID();
+ chid = ChannelCollection.findGeometric(cellID).getChannelId();
+
+ energy = hit.getCorrectedEnergy();
+
+ if (useRawEnergy){
+ fillEnergy = getRawADCSum(energy,cellID);
+ }
+ else {
+ fillEnergy = energy;
+ }
+ fillTime = hit.getTime();
+
+
+ //find the LED
+ if (row>0){
+ ledId=LedTopMap.get(chid);
+ }
+ else if (row<0){
+ ledId=LedBotMap.get(chid);
+ }
+ driverId=getDriver(ledId);
+ if (row<0) driverId+=4;
+
+
+
+                                /*Skip the events under threshold*/
+ if (energy<energyCut) continue;
+
+                                /*First, check if this led is the one in the NEXT step. Therefore, increment the step by 1*/
+ /*
+ * if (iStep[driverId]==0){
+
actualSequence[driverId][iStep[driverId]]=ledId;
- iStep[driverId]=1;
+ iStep[driverId]=1;
}
else if ((iStep[driverId]==1)&&(ledId!=actualSequence[driverId][0])){
- System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
+ System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
if (iStep[driverId]>0) drawProfiles(actualSequence[driverId][iStep[driverId]-1],driverId);
actualSequence[driverId][iStep[driverId]]=ledId;
iStep[driverId]++;
}
else if ((iStep[driverId]>1)&&(ledId!=actualSequence[driverId][iStep[driverId]-1])&&(ledId!=actualSequence[driverId][iStep[driverId]-2])){
- System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
+ System.out.println("LedAnalysis:: increment step ("+iStep[driverId]+") for driver "+driverId+" . Led ID: "+ledId+" Column: "+column+" Row: "+row);
if (iStep[driverId]>0) drawProfiles(actualSequence[driverId][iStep[driverId]-1],driverId);
actualSequence[driverId][iStep[driverId]]=ledId;
iStep[driverId]++;
}
- // if (iStep[driverId]==-1) continue;
-
- */
-
- if (iStep[driverId]==-1) continue; /*Not yet data*/
-
- /*Put this code here, since we want to always fill the ntuple*/
- iTuple.get(id).fill(0,nEvents[id]);
- iTuple.get(id).fill(1,fillEnergy);
- iTuple.get(id).fill(2,fillTime);
- iTuple.get(id).addRow();
- nEvents[id]++;
-
-
-
- /*Add a debug print */
- if (eventN % 10000==0){
- System.out.println("Debug. Event "+eventN+" LED ID: "+ledId+" DRIVER ID: "+driverId+" ECAL ID: "+id+" ROW: "+row+" COLUMN: "+column+ "HISTO ID: "+id);
- }
+ // if (iStep[driverId]==-1) continue;
+
+ */
+
+ if (iStep[driverId]==-1) continue; /*Not yet data*/
+
+ /*Put this code here, since we want to always fill the ntuple*/
+ iTuple.get(id).fill(0,nEvents[id]);
+ iTuple.get(id).fill(1,fillEnergy);
+ iTuple.get(id).fill(2,fillTime);
+ iTuple.get(id).addRow();
+ nEvents[id]++;
+
+
+
+ /*Add a debug print */
+ if (eventN % 10000==0){
+ System.out.println("Debug. Event "+eventN+" LED ID: "+ledId+" DRIVER ID: "+driverId+" ECAL ID: "+id+" ROW: "+row+" COLUMN: "+column+ "HISTO ID: "+id);
+ }
}
if (eventN % 10000==0){
- System.out.println("\n");
- }
- }
+ System.out.println("\n");
+ }
+ }
}
/*
@@ -401,7 +420,7 @@
IFunctionFactory fFactory=aida.analysisFactory().createFunctionFactory(aida.tree());
IFitResult fResult;
- IFitter fFitter;
+ IFitter fFitter;
for (int id = 0; id < 11 * 47; id++) {
@@ -420,9 +439,9 @@
/*Clear previous*/
if (id>0){
- aida.tree().rm("strip");
- aida.tree().rm("fun0");
- aida.tree().rm("fun1");
+ aida.tree().rm("strip");
+ aida.tree().rm("fun0");
+ aida.tree().rm("fun1");
}
/*Create the profile.*/
cProfile=aida.profile1D("strip",nBins,-0.5,nEvents[id]*(1-skipInitial)+0.5);
@@ -432,79 +451,79 @@
fFunction1=fFactory.createFunctionByName("fun1","G");
if (EcalMonitoringUtilities.isInHole(row,column)==true){
- System.out.println("Channel X= "+column+" Y= "+row+" is in hole. Skip");
- hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
- System.out.println("In hole, skip");
- continue;
+ System.out.println("Channel X= "+column+" Y= "+row+" is in hole. Skip");
+ hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
+ System.out.println("In hole, skip");
+ continue;
}
else if (nEvents[id]<nEventsMin) {
- hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
- System.err.println("LedAnalysis:: the channel X= "+column+" Y= "+row+" has not enough events "+nEvents[id]+" "+nEventsMin);
-
- continue;
- }
+ hCharge.add(aida.histogram1D("charge_"+id,200,0.,1.)); //create here the histogram to keep sync
+ System.err.println("LedAnalysis:: the channel X= "+column+" Y= "+row+" has not enough events "+nEvents[id]+" "+nEventsMin);
+
+ continue;
+ }
//Fill the profile*/
nSkip=(int)(nEvents[id]*skipInitial);
if (nSkip>iTuple.get(id).rows()){
- System.out.println("Can't skip initial events?");
- nSkip=0;
+ System.out.println("Can't skip initial events?");
+ nSkip=0;
}
iTuple.get(id).start();
iTuple.get(id).skip(nSkip); //This is the work-around for those channels with charge starting from 0 and rapidly growing//
n=0;
iTuple.get(id).next();
while ( iTuple.get(id).next() ){
- e=iTuple.get(id).getDouble(1);
- if (e<eMin) eMin=e;
- if (e>eMax) eMax=e;
- cProfile.fill(1.*n,e);
- n++;
- }
+ e=iTuple.get(id).getDouble(1);
+ if (e<eMin) eMin=e;
+ if (e>eMax) eMax=e;
+ cProfile.fill(1.*n,e);
+ n++;
+ }
fFitter=aida.analysisFactory().createFitFactory().createFitter("chi2","","v");
if (doFullAnalysis){
- //Init function parameters
- double[] initialPars={eMax-eMin,nEvents[id]/10.,eMin};
- if (initialPars[0]<0) initialPars[0]=0;
- fFunction.setParameters(initialPars);
-
- //Do the fit
- System.out.println("LedAnalysis:: do profile fit "+id+" "+fFitter.engineName()+" "+fFitter.fitMethodName());
- System.out.println("LedAnalysis:: initial parameters "+initialPars[0]+" "+initialPars[1]+" "+initialPars[2]);
- fResult=fFitter.fit(cProfile,fFunction);
- fPars = fResult.fittedParameters();
- fParErrs = fResult.errors();
- fParNames = fResult.fittedParameterNames();
- System.out.println("LedAnalysis:: Status= "+fResult.fitStatus()+" "+fResult.isValid()+" Chi2 = "+fResult.quality()+" NDF: "+fResult.ndf());
- for(int i=0; i< fResult.fittedFunction().numberOfParameters(); i++ ){
- System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
- }
- fFunction.setParameters(fPars);
-
-
- //if fit failed, revert to simpler case
- if ((fResult.isValid()==false)||Double.isNaN(fParErrs[0])||Double.isNaN(fParErrs[1])||Double.isNaN(fParErrs[2])){
- System.out.println("LedAnalysis:: fit failed. Reverting to simpler case");
- nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
- }
- else{
- //Now we have the tau parameter. Take ONLY the events that are with N>5*tau/
- //As a cross-check, also verify that tau > Nevents/10, otherwise skip the first Nevents/2
- //and emit warning
- nSkip=(int)( fPars[1]*5);
- if (nSkip < (nEvents[id]*skipMin)){
- System.out.println("LedAnalysis:: Skip number too low: "+nSkip+" Increment it to "+nEvents[id]/2);
- nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
- }
- if (nSkip > nEvents[id]){
- System.out.println("LedAnalysis:: Skip number too high, reduce it");
- nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
- }
- }
+ //Init function parameters
+ double[] initialPars={eMax-eMin,nEvents[id]/10.,eMin};
+ if (initialPars[0]<0) initialPars[0]=0;
+ fFunction.setParameters(initialPars);
+
+ //Do the fit
+ System.out.println("LedAnalysis:: do profile fit "+id+" "+fFitter.engineName()+" "+fFitter.fitMethodName());
+ System.out.println("LedAnalysis:: initial parameters "+initialPars[0]+" "+initialPars[1]+" "+initialPars[2]);
+ fResult=fFitter.fit(cProfile,fFunction);
+ fPars = fResult.fittedParameters();
+ fParErrs = fResult.errors();
+ fParNames = fResult.fittedParameterNames();
+ System.out.println("LedAnalysis:: Status= "+fResult.fitStatus()+" "+fResult.isValid()+" Chi2 = "+fResult.quality()+" NDF: "+fResult.ndf());
+ for(int i=0; i< fResult.fittedFunction().numberOfParameters(); i++ ){
+ System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
+ }
+ fFunction.setParameters(fPars);
+
+
+ //if fit failed, revert to simpler case
+ if ((fResult.isValid()==false)||Double.isNaN(fParErrs[0])||Double.isNaN(fParErrs[1])||Double.isNaN(fParErrs[2])){
+ System.out.println("LedAnalysis:: fit failed. Reverting to simpler case");
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ }
+ else{
+ //Now we have the tau parameter. Take ONLY the events that are with N>5*tau/
+ //As a cross-check, also verify that tau > Nevents/10, otherwise skip the first Nevents/2
+ //and emit warning
+ nSkip=(int)( fPars[1]*5);
+ if (nSkip < (nEvents[id]*skipMin)){
+ System.out.println("LedAnalysis:: Skip number too low: "+nSkip+" Increment it to "+nEvents[id]/2);
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ }
+ if (nSkip > nEvents[id]){
+ System.out.println("LedAnalysis:: Skip number too high, reduce it");
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ }
+ }
}
else{
- nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
+ nSkip=(int)(nEvents[id]*(skipMin+skipInitial));
}
System.out.println("LedAnalysis:: gaus fit :: Going to skip "+nSkip+" out of "+nEvents[id]);
@@ -516,11 +535,11 @@
iTuple.get(id).skip(nSkip);
n=0;
while (iTuple.get(id).next()){
- e=iTuple.get(id).getDouble(1);
- t=iTuple.get(id).getDouble(2);
- hCharge.get(id).fill(e);
- n++;
- }
+ e=iTuple.get(id).getDouble(1);
+ t=iTuple.get(id).getDouble(2);
+ hCharge.get(id).fill(e);
+ n++;
+ }
/*Finally do the fit with the gaussian*/
double[] initialPars1={hCharge.get(id).maxBinHeight(),hCharge.get(id).mean(),hCharge.get(id).rms()};
@@ -532,10 +551,10 @@
fResult=fFitter.fit(hCharge.get(id),fFunction1);
fPars = fResult.fittedParameters();
fParErrs = fResult.errors();
- fParNames = fResult.fittedParameterNames();
+ fParNames = fResult.fittedParameterNames();
System.out.println("Status= "+fResult.fitStatus()+" "+fResult.isValid()+" Chi2 = "+fResult.quality()+" NDF: "+fResult.ndf());
for(int i=0; i< fResult.fittedFunction().numberOfParameters(); i++ ){
- System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
+ System.out.println(fParNames[i]+" : "+fPars[i]+" +- "+fParErrs[i]);
}
fFunction1.setParameters(fPars);
mMean[id]=fPars[1];
@@ -554,6 +573,15 @@
style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
pPlotter2.region(0).plot(hMeanCharge2D);
pPlotter2.region(0).refresh();
+
+
+ style = pPlotter2.region(1).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
+ pPlotter2.region(1).refresh();
+
}
else{
IPlotterStyle pstyle = aida.analysisFactory().createPlotterFactory().createPlotterStyle();
@@ -563,60 +591,72 @@
pstyle.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
pstyle.setParameter("hist2DStyle", "colorMap");
if (pPlotter2!=null){
- pPlotter2.createRegion().plot(hMeanCharge2D,pstyle);
- pPlotter2.show();
+ pPlotter2.createRegion().plot(hMeanCharge2D,pstyle);
+ pPlotter2.show();
}
}
if (isMonitoringApp){
askUploadToDBDialog();
synchronized (modalMonitor) {
- try{
- modalMonitor.wait(60000); //wait 1 minute for user interaction.
- }
- catch(InterruptedException excp){
- System.out.println("Got exception: "+excp);
- }
+ try{
+ modalMonitor.wait(60000); //wait 1 minute for user interaction.
+ }
+ catch(InterruptedException excp){
+ System.out.println("Got exception: "+excp);
+ }
}
if ((m_ret!=LedColor.UNKNOWN)){
- if (m_ret==LedColor.BLUE) System.out.println("OK, upload to DB BLUE");
- else System.out.println("OK, upload to DB RED");
- try {
- uploadToDB(m_ret);
- } catch (SQLException | DatabaseObjectException | ConditionsObjectException error) {
- throw new RuntimeException("Error uploading to the database ", error);
- }
-
- System.out.println("Save an Elog too");
- uploadToElog();
+ if (m_ret==LedColor.BLUE) System.out.println("OK, upload to DB BLUE");
+ else System.out.println("OK, upload to DB RED");
+ try {
+ uploadToDB(m_ret);
+ } catch (SQLException | DatabaseObjectException | ConditionsObjectException error) {
+ throw new RuntimeException("Error uploading to the database ", error);
+ }
+
+ System.out.println("Get reference data, produce reference ratio map");
+ compareWithReference(m_ret);
+
+ style = pPlotter2.region(1).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
+ pPlotter2.region(1).refresh();
+
+ System.out.println("Save an Elog too");
+ uploadToElog();
}
else{
- System.out.println("Cancel pressed. Nothing to do");
- }
- }
+ System.out.println("Cancel pressed. Nothing to do");
+ }
+ }
+
+
/*Write a file with the LED values*/
try {
if (useRawEnergy){
- outFileName=runNumber+".raw.txt";
+ outFileName=runNumber+".raw.txt";
}
else{
- outFileName=runNumber+".energy.txt";
+ outFileName=runNumber+".energy.txt";
}
PrintWriter writer = new PrintWriter(outFileName, "UTF-8");
for (int cid = 1; cid <= 442; cid++) {/*This is a loop over the channel ID, as in the conditions system*/
- EcalChannel cc = findChannel(cid);
- column = cc.getX(); //This is the column
- row = cc.getY(); //This is the row
- id=EcalMonitoringUtilities.getHistoIDFromRowColumn(row,column);
- row = EcalMonitoringUtilities.getRowFromHistoID(id);
- column = EcalMonitoringUtilities.getColumnFromHistoID(id);
- if (EcalMonitoringUtilities.isInHole(row, column)) continue;
- if ((row == 0) || (column == 0)) continue;
-
- writer.print(cid+" "+column+" "+row+" "+" "+ mMean[id]+" "+mRMS[id]+"\r\n");
-
+ EcalChannel cc = findChannel(cid);
+ column = cc.getX(); //This is the column
+ row = cc.getY(); //This is the row
+ id=EcalMonitoringUtilities.getHistoIDFromRowColumn(row,column);
+ row = EcalMonitoringUtilities.getRowFromHistoID(id);
+ column = EcalMonitoringUtilities.getColumnFromHistoID(id);
+ if (EcalMonitoringUtilities.isInHole(row, column)) continue;
+ if ((row == 0) || (column == 0)) continue;
+
+ writer.print(cid+" "+column+" "+row+" "+" "+ mMean[id]+" "+mRMS[id]+"\r\n");
+
}
writer.close();
@@ -632,28 +672,30 @@
System.out.println(ioe.getMessage());
}
-
-
+
+
+
+
System.out.println("EcalLedSequenceMonitor endOfData clear histograms");
- for(int ii = 0; ii < NUM_CHANNELS; ii++) {
+ for(int ii = 0; ii < NUM_CHANNELS; ii++) {
row=EcalMonitoringUtilities.getRowFromHistoID(ii);
column = EcalMonitoringUtilities.getColumnFromHistoID(ii);
- hName="charge_"+ii;
+ hName="charge_"+ii;
try{
- aida.tree().rm(hName);
+ aida.tree().rm(hName);
}
catch(IllegalArgumentException ee){
- System.out.println("Got exception "+ee);
+ System.out.println("Got exception "+ee);
}
if (!saveTuple||(isMonitoringApp)){
- hName="nTuple"+ii;
- try{
- aida.tree().rm(hName);
- }
- catch(IllegalArgumentException ee){
- System.out.println("Got exception "+ee);
- }
+ hName="nTuple"+ii;
+ try{
+ aida.tree().rm(hName);
+ }
+ catch(IllegalArgumentException ee){
+ System.out.println("Got exception "+ee);
+ }
}
}
System.out.println("EcalLedSequenceMonitor endOfData clear histograms done");
@@ -665,10 +707,10 @@
/**
* This function returns the driver number (from 0 to 3) given the LED id.
* @param led
- * @return
+ * @return the driver number from the LED id
*/
public int getDriver(int led){
- int ret=-1;
+ int ret=-1;
if ((led>=2)&&(led<56)) ret=0;
else if ((led>=56)&&(led<112)) ret=1;
else if ((led>=112)&&(led<168)) ret=2;
@@ -681,7 +723,7 @@
* If the gain changes (because we do a re-calibration), I do not want to include this in the LED analysis
* @param energy
* @param cellID
- * @return
+ * @return the pedestal-subtracted raw energy
*/
public double getRawADCSum(double energy,long cellID){
EcalChannelConstants channelData = ecalConditions.getChannelConstants(ecalConditions.getChannelCollection().findGeometric(cellID));
@@ -696,7 +738,7 @@
int x,y,id;
double mean,rms;
System.out.println(String.format("Uploading new led data to the database, runMin=%d, runMax=%d, tag=%s ....",
- runNumber,runNumberMax,dbTag));
+ runNumber,runNumberMax,dbTag));
conditionsManager = DatabaseConditionsManager.getInstance();
EcalLedCalibrationCollection led_calibrations = new EcalLedCalibrationCollection();
@@ -726,8 +768,8 @@
System.err.println("CollectionID: "+collectionId);
led_calibrations.insert();
ConditionsRecord conditionsRecord = new ConditionsRecord(
- led_calibrations.getCollectionId(), runNumber, runNumberMax, dbTableName, dbTableName,
- "Generated by LedAnalysis from Run #"+runNumber, dbTag);
+ led_calibrations.getCollectionId(), runNumber, runNumberMax, dbTableName, dbTableName,
+ "Generated by LedAnalysis from Run #"+runNumber, dbTag);
conditionsRecord.setConnection(conditionsManager.getConnection());
tableMetaData = conditionsManager.findTableMetaData("conditions");
conditionsRecord.setTableMetaData(tableMetaData);
@@ -772,6 +814,79 @@
}
}
+ private void compareWithReference(LedColor color){
+ int ID=0;
+ int x,y,chid;
+ double mean,rms,fillData=1;
+ if (color==LedColor.UNKNOWN){
+ System.out.println("LedMonitoringSequence::compare with reference, doing nothing");
+ return;
+ }
+ else if (color==LedColor.RED) ID=fRedReferenceID;
+ else if (color==LedColor.BLUE) ID=fBlueReferenceID;
+
+ conditionsManager = DatabaseConditionsManager.getInstance();
+
+
+ EcalLedCalibrationCollection referenceDataCollection = new EcalLedCalibrationCollection();
+ referenceDataCollection.setConnection(conditionsManager.getConnection());
+
+ TableMetaData tableMetaData = conditionsManager.findTableMetaData(dbTableName);
+ referenceDataCollection.setTableMetaData(tableMetaData);
+ System.out.println("Try to get reference data from DB. Collection ID is "+ID);
+ try {
+ referenceDataCollection.select(ID);
+ } catch (SQLException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ } catch (DatabaseObjectException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ /*Now data from the reference should be there*/
+ for (EcalLedCalibration referenceData : referenceDataCollection){
+
+ chid=referenceData.getFieldValue("ecal_channel_id");
+ mean=referenceData.getFieldValue("led_response");
+ rms=referenceData.getFieldValue("rms");
+
+ EcalChannel cc = findChannel(chid);
+ column = cc.getX(); //This is the column
+ row = cc.getY(); //This is the row
+ chid=EcalMonitoringUtilities.getHistoIDFromRowColumn(row,column);
+ row = EcalMonitoringUtilities.getRowFromHistoID(id);
+ column = EcalMonitoringUtilities.getColumnFromHistoID(id);
+
+
+ if (mean!=0) fillData=mMean[id]/mean;
+ else fillData=1;
+ System.out.println("row= "+row+" column= "+column+" data= "+mMean[id]+" ref= "+mean+" ratio= "+fillData);
+ hMeanCharge2DReferenceRatio.fill(column,row,fillData);
+
+
+
+ }
+
+
+
+ style = pPlotter2.region(0).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(0).plot(hMeanCharge2D);
+ pPlotter2.region(0).refresh();
+
+
+ style = pPlotter2.region(1).style();
+ style.setParameter("hist2DStyle", "colorMap");
+ style.dataStyle().fillStyle().setParameter("colorMapScheme", "rainbow");
+ style.dataStyle().fillStyle().setParameter("showZeroHeightBins", Boolean.FALSE.toString());
+ pPlotter2.region(1).plot(hMeanCharge2DReferenceRatio);
+ pPlotter2.region(1).refresh();
+
+
+ }
private void drawProfiles(int ledID,int driverID){
@@ -819,9 +934,9 @@
okButtonBlue = new JButton("Yes, blue");
cancelButton = new JButton("Cancel");
labelString = "<html> Update conditions to DB <br> for run: <br> "+runNumber+" - "+runNumberMax+" <br> ???? <br> "
- + "Use the monitoring app to look at the map<br>"
- + "(Tab LED sequence)<br>"
- +"Reply in 60 seconds<br>"+"</html>";
+ + "Use the monitoring app to look at the map<br>"
+ + "(Tab LED sequence)<br>"
+ +"Reply in 60 seconds<br>"+"</html>";
label = new JLabel( labelString);
frame = new JFrame("Upload to DB?");
@@ -848,43 +963,43 @@
okButtonBlue.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
- m_ret=LedColor.BLUE;
- frame.dispose();
- synchronized(modalMonitor)
- {
- System.out.println("Blue pressed");
- modalMonitor.notify();
- }
- }
- }
- );
+ m_ret=LedColor.BLUE;
+ frame.dispose();
+ synchronized(modalMonitor)
+ {
+ System.out.println("Blue pressed");
+ modalMonitor.notify();
+ }
+ }
+ }
+ );
okButtonRed.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
- m_ret=LedColor.RED;
- frame.dispose();
- synchronized(modalMonitor)
- {
- System.out.println("Red pressed");
- modalMonitor.notify();
- }
- }
- }
- );
+ m_ret=LedColor.RED;
+ frame.dispose();
+ synchronized(modalMonitor)
+ {
+ System.out.println("Red pressed");
+ modalMonitor.notify();
+ }
+ }
+ }
+ );
cancelButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
- m_ret=LedColor.UNKNOWN;
- frame.dispose();
- synchronized(modalMonitor)
- {
- System.out.println("Cancel pressed");
- modalMonitor.notify();
- }
- }
- }
- );
+ m_ret=LedColor.UNKNOWN;
+ frame.dispose();
+ synchronized(modalMonitor)
+ {
+ System.out.println("Cancel pressed");
+ modalMonitor.notify();
+ }
+ }
+ }
+ );
System.out.println("askUploadDB done");
}
Modified: java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java
=============================================================================
--- java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java (original)
+++ java/branches/jeremy-dev2/monitoring-drivers/src/main/java/org/hps/monitoring/ecal/plots/EcalMonitoringPlots.java Mon Mar 21 18:04:10 2016
@@ -59,7 +59,7 @@
/**
* Set the refresh rate for histograms in this driver
- * @param eventRefreshRate: the refresh rate, defined as number of events to accumulate before
+ * @param eventRefreshRate the refresh rate, defined as number of events to accumulate before
* refreshing the plot
*/
public void setEventRefreshRate(int eventRefreshRate) {
Modified: java/branches/jeremy-dev2/monitoring-util/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/monitoring-util/pom.xml (original)
+++ java/branches/jeremy-dev2/monitoring-util/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/monitoring-util/</url>
Modified: java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java
=============================================================================
--- java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java (original)
+++ java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/plotting/ExportPdf.java Mon Mar 21 18:04:10 2016
@@ -45,12 +45,10 @@
/**
* Save a set of tabs containing plots to a file.
*
- * @param plotTabs the top level tab component (plots are actually in a set
- * of tabs without these tabs)
+ * @param plotters the list of plotters to save (from plots in the regions)
* @param fileName the file name
* @param runData the list of run data to save on the cover page
- * @throws IOException if there is a problem with the IO (e.g. writing to
- * PDF file)
+ * @throws IOException if there is a problem with the IO (e.g. writing to PDF file)
*/
public static void write(List<IPlotter> plotters, String fileName, List<String> runData)
throws IOException {
Modified: java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java
=============================================================================
--- java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java (original)
+++ java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/subsys/et/package-info.java Mon Mar 21 18:04:10 2016
@@ -1,7 +1,5 @@
/**
* ET subsystem monitoring
- * <p>
- * {@link EtSystemMonitor} implements basic status checks of the ET system.
*
* @author Jeremy McCormick, SLAC
*/
Modified: java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java
=============================================================================
--- java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java (original)
+++ java/branches/jeremy-dev2/monitoring-util/src/main/java/org/hps/monitoring/trigger/DiagnosticUpdatable.java Mon Mar 21 18:04:10 2016
@@ -9,14 +9,15 @@
* alter their displayed or constituent values.
*
* @author Kyle McCarty <[log in to unmask]>
- * @see DiagSnapshot
+ * @see org.hps.analysis.trigger.data.DiagnosticSnapshot
*/
public interface DiagnosticUpdatable {
/**
* Updates the object with information from the trigger diagnostic
* snapshot in the argument.
- * @param snapshot - The snapshot containing information with which
- * to update the object.
+ * @param runSnapshot the accumulated snapshot
+ * @param localSnapshot The snapshot containing information with which
+ * to update the object.
*/
public void updatePanel(DiagnosticSnapshot runSnapshot, DiagnosticSnapshot localSnapshot);
}
Modified: java/branches/jeremy-dev2/parent/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/parent/pom.xml (original)
+++ java/branches/jeremy-dev2/parent/pom.xml Mon Mar 21 18:04:10 2016
@@ -1,5 +1,4 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<prerequisites>
<maven>3.0</maven>
</prerequisites>
@@ -7,14 +6,14 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<packaging>pom</packaging>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
<name>parent</name>
<description>HPS Java parent POM</description>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<org.lcsim.cacheDir>${user.home}</org.lcsim.cacheDir>
- <lcsimVersion>3.3-SNAPSHOT</lcsimVersion>
+ <lcsimVersion>3.5-SNAPSHOT</lcsimVersion>
<skipSite>false</skipSite>
<skipPlugin>false</skipPlugin>
</properties>
@@ -144,112 +143,112 @@
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-util</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-detector-data</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-detector-model</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-conditions</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-ecal-recon</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-ecal-readout-sim</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-tracking</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-evio</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-recon</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-analysis</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-monitoring-drivers</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-monitoring-app</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-users</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-steering-files</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-distribution</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-ecal-event-display</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-record-util</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-monitoring-util</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-run-database</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-crawler</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-job</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.hps</groupId>
<artifactId>hps-logging</artifactId>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</dependency>
<!-- Next are external dependencies used in multiple modules. -->
<dependency>
@@ -459,8 +458,7 @@
<configuration>
<target>
<!-- Create the directory for test output. -->
- <mkdir
- dir="${project.build.directory}/test-output" />
+ <mkdir dir="${project.build.directory}/test-output" />
</target>
</configuration>
<goals>
@@ -491,16 +489,16 @@
<checkstyleRules>
<module name="Checker">
<module name="FileTabCharacter">
- <property name="fileExtensions" value="java,xml,lcsim,prop,properties"/>
+ <property name="fileExtensions" value="java,xml,lcsim,prop,properties" />
</module>
<module name="TreeWalker">
<module name="UnusedImports">
- <property name="processJavadoc" value="false"/>
+ <property name="processJavadoc" value="false" />
</module>
- <module name="RedundantImport"/>
+ <module name="RedundantImport" />
<!--
- <module name="EmptyBlock"/>
- <module name="EmptyStatement"/>
+ <module name="EmptyBlock" />
+ <module name="EmptyStatement" />
-->
</module>
</module>
Modified: java/branches/jeremy-dev2/plugin/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/plugin/pom.xml (original)
+++ java/branches/jeremy-dev2/plugin/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/plugin/</url>
Modified: java/branches/jeremy-dev2/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/pom.xml (original)
+++ java/branches/jeremy-dev2/pom.xml Mon Mar 21 18:04:10 2016
@@ -9,7 +9,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>svn://svn.freehep.org/hps/java/trunk/</url>
Modified: java/branches/jeremy-dev2/recon/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/recon/pom.xml (original)
+++ java/branches/jeremy-dev2/recon/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/recon/</url>
Modified: java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java
=============================================================================
--- java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java (original)
+++ java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/EcalPairsFilter.java Mon Mar 21 18:04:10 2016
@@ -18,6 +18,7 @@
private String clusterCollectionName = "EcalClusters";
private double maxDt = 2.5;
+ private boolean strictPairs = false;
public void setClusterCollectionName(String clusterCollectionName) {
this.clusterCollectionName = clusterCollectionName;
@@ -27,12 +28,19 @@
this.maxDt = maxDt;
}
+ public void setStrictPairs(boolean strictPairs) {
+ this.strictPairs = strictPairs;
+ }
+
@Override
public void process(EventHeader event) {
incrementEventProcessed();
if (event.hasCollection(Cluster.class, clusterCollectionName)) {
List<Cluster> clusters = event.get(Cluster.class, clusterCollectionName);
if (clusters.size() < 2) {
+ skipEvent();
+ }
+ if (strictPairs && clusters.size() > 2) {
skipEvent();
}
List<Double> clusterTimes = new ArrayList<Double>();
@@ -52,7 +60,7 @@
skipEvent();
}
} else {
- skipEvent();
+ skipEvent();
}
incrementEventPassed();
}
Modified: java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java
=============================================================================
--- java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java (original)
+++ java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/filtering/FEEFilterDriver.java Mon Mar 21 18:04:10 2016
@@ -1,16 +1,38 @@
package org.hps.recon.filtering;
+import org.lcsim.event.Cluster;
+import org.hps.recon.ecal.cluster.ClusterUtilities;
+import org.hps.record.epics.EpicsData;
import org.hps.record.triggerbank.AbstractIntData;
import org.hps.record.triggerbank.TIData;
+import org.lcsim.event.EventHeader;
import org.lcsim.event.GenericObject;
-
-import org.lcsim.event.Cluster;
-import org.lcsim.event.EventHeader;
import org.lcsim.util.Driver;
-import org.hps.recon.ecal.cluster.ClusterUtilities;
-import org.hps.record.epics.EpicsData;
public class FEEFilterDriver extends Driver
{
+ //Set min seed energy value, default to 2015 run
+ private double seedCut = 0.4;
+
+ //set min cluster energy value, default to 2015 run
+ private double clusterCut = 0.6;
+
+ /**
+ * Set the cut value for seed energy in GeV
+ * @param seedCut
+ */
+ void setSeedCut(double seedCut) {
+ this.seedCut = seedCut;
+ }
+
+ /**
+ * Set the cut value for cluster energy in GeV
+ * @param clusterCut
+ */
+ void setClusterCut(double clusterCut) {
+ this.clusterCut = clusterCut;
+ }
+
+
public void process(EventHeader event) {
// don't drop any events with EPICS data:
@@ -46,8 +68,8 @@
// keep events with a cluster over 600 MeV with seed over 400 MeV (for 2015 running).
// keep events with cluster over 1.2 GeV and seed over 650 MeV for 2016 running.
- if (cc.getEnergy() > 1.2 &&
- ClusterUtilities.findSeedHit(cc).getCorrectedEnergy() > 0.65)
+ if (cc.getEnergy() > clusterCut &&
+ ClusterUtilities.findSeedHit(cc).getCorrectedEnergy() > seedCut )
return;
}
Modified: java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java
=============================================================================
--- java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java (original)
+++ java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/particle/ReconParticleDriver.java Mon Mar 21 18:04:10 2016
@@ -230,7 +230,7 @@
* clusters. Clusters will be matched with tracks when this is possible.
*
* @param clusters - The list of event clusters.
- * @param tracks - The list of event tracks.
+ * @param trackCollections - The list of event tracks.
* @return Returns a <code>List</code> collection containing all of the
* <code>ReconstructedParticle</code> objects generated from the argument
* data.
Modified: java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java
=============================================================================
--- java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java (original)
+++ java/branches/jeremy-dev2/recon/src/main/java/org/hps/recon/utils/TrackClusterMatcher.java Mon Mar 21 18:04:10 2016
@@ -253,11 +253,11 @@
* and uncorrected cluster positions.
*
* @param cluster = position-uncorrected cluster
- * @param track
+ * @param particle recon particle with tracks
*
* @return #sigma between cluster and track positions
*/
- public double getNSigmaPosition(Cluster cluster,ReconstructedParticle particle) {
+ public double getNSigmaPosition(Cluster cluster, ReconstructedParticle particle) {
if (particle.getTracks().size()<1) return Double.MAX_VALUE;
Track track=particle.getTracks().get(0);
Modified: java/branches/jeremy-dev2/record-util/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/record-util/pom.xml (original)
+++ java/branches/jeremy-dev2/record-util/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/record-util/</url>
Modified: java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java
=============================================================================
--- java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java (original)
+++ java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/AbstractRecordQueue.java Mon Mar 21 18:04:10 2016
@@ -45,7 +45,8 @@
/**
* Class constructor with the timeout in seconds.
*
- * @param timeoutSeconds the timeout in seconds
+ * @param timeOutMillis the timeout in seconds
+ * @param maxSize the maximum size of the queue
*/
public AbstractRecordQueue(final long timeOutMillis, final int maxSize) {
this.timeOutMillis = timeOutMillis;
@@ -55,7 +56,7 @@
/**
* Add a record to the queue if there is space.
*
- * @param event the LCIO event to add
+ * @param record the LCIO event to add
*/
// FIXME: Should drain queue if over capacity.
public void addRecord(final RecordType record) {
Modified: java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java
=============================================================================
--- java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java (original)
+++ java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/composite/RecordProcessorAdapter.java Mon Mar 21 18:04:10 2016
@@ -44,7 +44,7 @@
/**
* Activate the <code>endJob</code> methods of the registered processors.
*
- * @param the <code>LoopEvent</code> which activated <code>finish</code>
+ * @param loopEvent the <code>LoopEvent</code> which activated <code>finish</code>
*/
@Override
public void finish(final LoopEvent loopEvent) {
@@ -80,7 +80,7 @@
/**
* Activate the <code>startJob</code> methods of the registered processors.
*
- * @param the <code>LoopEvent</code> which activated the start
+ * @param loopEvent the <code>LoopEvent</code> which activated the start
*/
@Override
public void start(final LoopEvent loopEvent) {
@@ -103,7 +103,7 @@
/**
* Activate the <code>suspend</code> methods of the registered processors.
*
- * @param the <code>LoopEvent</code> which activated <code>suspend</code>.
+ * @param loopEvent the <code>LoopEvent</code> which activated <code>suspend</code>.
*/
@Override
public void suspend(final LoopEvent loopEvent) {
Modified: java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java
=============================================================================
--- java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java (original)
+++ java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/svt/SvtEvioUtils.java Mon Mar 21 18:04:10 2016
@@ -1,11 +1,16 @@
package org.hps.record.svt;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.jlab.coda.jevio.BaseStructure;
+import org.jlab.coda.jevio.DataType;
+import org.jlab.coda.jevio.StructureType;
/**
* A set of static utility methods used to decode SVT data.
*
* @author Omar Moreno <[log in to unmask]>
- * @date November 20, 2014
*/
public class SvtEvioUtils {
@@ -19,9 +24,7 @@
private static final int APV_HEADER_BUFFER_ADDRESS_MASK = 0xFF; //[8:1]
private static final int APV_HEADER_DATA_FRAME_COUNT_MASK = 0xF; //[12:9]
private static final int APV_HEADER_DATA_APV_NR_MASK = 0x3; //[15:13]
-
-
-
+
// TODO: Move these to constants class
public static final int APV25_PER_HYBRID = 5;
public static final int CHANNELS_PER_APV25 = 128;
@@ -90,13 +93,12 @@
* Extract and return the front end board (FEB) ID associated with the
* multisample tail
*
- * @param data : a multisample header
+ @param multisampleTail : a multisample tail
* @return A FEB ID in the range 0-10
*/
public static int getFebIDFromMultisampleTail(int multisampleTail) {
return (multisampleTail >>> 8) & FEB_MASK;
}
-
/**
* Extract and return the front end board (FEB) hybrid ID associated with
@@ -113,7 +115,7 @@
* Extract and return the front end board (FEB) hybrid ID associated with
* the multisample tail
*
- * @param multisample : a multisample tail
+ * @param multisampleTail : a multisample tail
* @return A FEB hybrid ID in the range 0-3
*/
public static int getFebHybridIDFromMultisampleTail(int multisampleTail) {
@@ -258,7 +260,7 @@
/**
* Extract the error bit from the multisample header.
*
- * @param data : multisample of data
+ @param multisampleHeader : a multisample header
* @return value of the error bit. This is non-zero if there is an error.
*/
public static int getErrorBitFromMultisampleHeader(int multisampleHeader) {
@@ -441,6 +443,56 @@
return samples;
}
+ /**
+ * Retrieve all the banks in an event that match the given tag in their
+ * header and are not data banks.
+ *
+ * @param evioEvent : The event/bank being queried
+ * @param minROCTag : The minimum ROC bank tag to match (inclusive)
+ * @param maxROCTag : The maximum ROC bank tag to match (inclusive)
+ * @return A collection of non-data bank structures whose tag is in range
+ */
+ public static List<BaseStructure> getROCBanks(BaseStructure evioEvent, int minROCTag, int maxROCTag) {
+ List<BaseStructure> matchingBanks = new ArrayList<BaseStructure>();
+ if (evioEvent.getChildCount() > 0) {
+ for (BaseStructure childBank : evioEvent.getChildrenList()) {
+ if (childBank.getStructureType() == StructureType.BANK
+ && childBank.getHeader().getDataType() == DataType.ALSOBANK
+ && childBank.getHeader().getTag() >= minROCTag
+ && childBank.getHeader().getTag() <= maxROCTag) {
+ matchingBanks.add(childBank);
+ }
+ }
+ }
+ return matchingBanks;
+ }
+
+ public static List<BaseStructure> getDataBanks(BaseStructure evioEvent, int minROCTag, int maxROCTag, int minDataTag, int maxDataTag) {
+ List<BaseStructure> rocBanks = getROCBanks(evioEvent, minROCTag, maxROCTag);
+ List<BaseStructure> matchingBanks = new ArrayList<BaseStructure>();
+ for (BaseStructure rocBank : rocBanks) {
+ if (rocBank.getChildCount() > 0) {
+ for (BaseStructure childBank : rocBank.getChildrenList()) {
+ if (childBank.getHeader().getTag() >= minDataTag
+ && childBank.getHeader().getTag() <= maxDataTag) {
+ matchingBanks.add(childBank);
+ }
+ }
+ }
+ }
+ return matchingBanks;
+ }
+
+ public static List<int[]> getMultisamples(int[] data, int sampleCount, int headerLength) {
+ List<int[]> sampleList = new ArrayList<int[]>();
+ // Loop through all of the samples and make hits
+ for (int samplesN = 0; samplesN < sampleCount; samplesN += 4) {
+ int[] samples = new int[4];
+ System.arraycopy(data, headerLength + samplesN, samples, 0, samples.length);
+ sampleList.add(samples);
+ }
+ return sampleList;
+ }
/**
* Private constructor to prevent the class from being instantiated.
Modified: java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java
=============================================================================
--- java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java (original)
+++ java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/AbstractIntData.java Mon Mar 21 18:04:10 2016
@@ -74,7 +74,6 @@
* Return the int bank of an AbstractIntData read from LCIO.
*
* @param object
- * @return
*/
public static int[] getBank(GenericObject object) {
int N = object.getNInt() - 1;
Modified: java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java
=============================================================================
--- java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java (original)
+++ java/branches/jeremy-dev2/record-util/src/main/java/org/hps/record/triggerbank/TriggerConfigData.java Mon Mar 21 18:04:10 2016
@@ -93,7 +93,7 @@
/**
* Load DAQ config object from trigger config string data.
*
- * @param the run number (needed by configuration manager)
+ * @param run the run number (needed by configuration manager)
* @return the DAQ config object
*/
public DAQConfig loadDAQConfig(int run) {
Modified: java/branches/jeremy-dev2/run-database/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/run-database/pom.xml (original)
+++ java/branches/jeremy-dev2/run-database/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/run-database/</url>
Modified: java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EpicsVariable.java
=============================================================================
--- java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EpicsVariable.java (original)
+++ java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EpicsVariable.java Mon Mar 21 18:04:10 2016
@@ -53,9 +53,9 @@
* Create an EPICs variable.
*
* @param variableName the name of the variable
- * @param columnName the column name in the run db
+ * @param columnName the column name in the run database
* @param description the variable's description
- * @param variableType the type of the variable
+ * @param type the integer encoding of the type
*/
public EpicsVariable(final String variableName, final String columnName, final String description, final int type) {
this.variableName = variableName;
Modified: java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EvioDataBuilder.java
=============================================================================
--- java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EvioDataBuilder.java (original)
+++ java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/EvioDataBuilder.java Mon Mar 21 18:04:10 2016
@@ -55,7 +55,6 @@
}
public void main(String args[]) {
-
if (args.length == 0) {
throw new RuntimeException("No command line arguments provided.");
}
@@ -68,14 +67,9 @@
builder.build();
if (!builder.getEpicsData().isEmpty()) {
- RunManager runManager = null;
- try {
- runManager = new RunManager();
- runManager.setRun(run);
- runManager.updateEpicsData(epicsData);
- } finally {
- runManager.closeConnection();
- }
+ RunManager runManager = new RunManager();
+ runManager.setRun(run);
+ runManager.updateEpicsData(epicsData);
} else {
LOGGER.warning("No EPICS data was found to insert into run database.");
}
Modified: java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java
=============================================================================
--- java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java (original)
+++ java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseBuilder.java Mon Mar 21 18:04:10 2016
@@ -113,12 +113,7 @@
* Run spreadsheet CSV file with supplementary information (not used by default).
*/
private File spreadsheetFile;
-
- /**
- * List of SVT configuration bank data.
- */
- //private List<SvtConfigData> svtConfigs;
-
+
/**
* The trigger config object.
*/
@@ -147,9 +142,9 @@
/**
* Reload state for the current run number for testing.
*/
- static void reload(Connection connection, int run) {
-
- RunManager runManager = new RunManager(connection);
+ static void reload(int run) {
+
+ RunManager runManager = new RunManager();
runManager.setRun(run);
RunSummary runSummary = runManager.getRunSummary();
@@ -161,15 +156,10 @@
List<ScalerData> scalerData = runManager.getScalerData();
LOGGER.info("loaded " + scalerData.size() + " scaler records");
-
- //List<SvtConfigData> svtConfigs = runManager.getSvtConfigData();
- //LOGGER.info("loaded " + svtConfigs.size() + " SVT configurations");
LOGGER.info("printing DAQ config ...");
DAQConfig daqConfig = runManager.getDAQConfig();
daqConfig.printConfig(System.out);
-
- runManager.closeConnection();
}
/**
@@ -340,17 +330,6 @@
throw new IllegalStateException("The detector name was not set.");
}
- // Initialize the conditions system because the DAQ config processor needs it.
- /*
- try {
- DatabaseConditionsManager dbManager = DatabaseConditionsManager.getInstance();
- DatabaseConditionsManager.getInstance().setDetector(detectorName, runSummary.getRun());
- dbManager.freeze();
- } catch (ConditionsNotFoundException e) {
- throw new RuntimeException(e);
- }
- */
-
// List of processors to execute in the job.
ArrayList<AbstractRecordProcessor<EvioEvent>> processors = new ArrayList<AbstractRecordProcessor<EvioEvent>>();
@@ -467,7 +446,7 @@
// Optionally load back run information.
if (reload) {
LOGGER.info("reloading data for run " + getRun() + " ...");
- reload(connectionParameters.createConnection(), getRun());
+ reload(getRun());
}
} else {
@@ -606,7 +585,7 @@
// Initialize the run manager.
Connection connection = connectionParameters.createConnection();
- RunManager runManager = new RunManager(connection);
+ RunManager runManager = new RunManager();
runManager.setRun(runSummary.getRun());
// Turn off autocommit to start transaction.
@@ -645,10 +624,7 @@
} catch (SQLException e2) {
throw new RuntimeException("Error performing rollback.", e2);
}
- }
-
- // Close the database connection.
- runManager.closeConnection();
+ }
}
/**
Modified: java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java
=============================================================================
--- java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java (original)
+++ java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunDatabaseCommandLine.java Mon Mar 21 18:04:10 2016
@@ -4,11 +4,10 @@
import java.net.URISyntaxException;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.hps.conditions.database.ConnectionParameters;
+import org.apache.commons.cli.PosixParser;
import org.srs.datacat.client.Client;
import org.srs.datacat.client.ClientBuilder;
@@ -30,7 +29,6 @@
static {
OPTIONS.addOption("h", "help", false, "print help and exit (overrides all other arguments)");
OPTIONS.addOption("r", "run", true, "run to update");
- OPTIONS.addOption("p", "connection-properties", true, "database connection properties file (required)");
OPTIONS.addOption("Y", "dry-run", false, "dry run which will not update the database");
OPTIONS.addOption("x", "replace", false, "allow deleting and replacing an existing run");
OPTIONS.addOption("s", "spreadsheet", true, "path to run database spreadsheet (CSV format)");
@@ -85,12 +83,7 @@
* Load back run information after insert (for debugging).
*/
private boolean reload = false;
-
- /**
- * Database connection parameters.
- */
- private ConnectionParameters connectionParameters = null;
-
+
/**
* Data catalog client interface.
*/
@@ -126,20 +119,6 @@
final HelpFormatter help = new HelpFormatter();
help.printHelp("RunDatabaseCommandLine [options]", "", OPTIONS, "");
System.exit(0);
- }
-
- // Database connection properties file.
- if (cl.hasOption("p")) {
- final String dbPropPath = cl.getOptionValue("p");
- final File dbPropFile = new File(dbPropPath);
- if (!dbPropFile.exists()) {
- throw new IllegalArgumentException("Connection properties file " + dbPropFile.getPath()
- + " does not exist.");
- }
- connectionParameters = ConnectionParameters.fromProperties(dbPropFile);
- } else {
- // Database connection properties file is required.
- throw new RuntimeException("Connection properties are a required argument.");
}
// Run number.
@@ -219,7 +198,6 @@
.createRunSummary(run)
.setFolder(folder)
.setDetectorName(detectorName)
- .setConnectionParameters(connectionParameters)
.setDatacatClient(datacatClient)
.setSite(site)
.setDryRun(dryRun)
@@ -228,5 +206,5 @@
.setSpreadsheetFile(spreadsheetFile)
.setReload(reload)
.run();
- }
+ }
}
Modified: java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunManager.java
=============================================================================
--- java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunManager.java (original)
+++ java/branches/jeremy-dev2/run-database/src/main/java/org/hps/run/database/RunManager.java Mon Mar 21 18:04:10 2016
@@ -5,7 +5,8 @@
import java.util.List;
import java.util.logging.Logger;
-import org.hps.conditions.database.ConnectionParameters;
+import javax.sql.DataSource;
+
import org.hps.record.daqconfig.DAQConfig;
import org.hps.record.epics.EpicsData;
import org.hps.record.scalers.ScalerData;
@@ -14,19 +15,27 @@
import org.lcsim.conditions.ConditionsEvent;
import org.lcsim.conditions.ConditionsListener;
+import com.mysql.jdbc.jdbc2.optional.MysqlDataSource;
+
/**
* Manages access to the run database.
- *
+ *
* @author Jeremy McCormick, SLAC
*/
public final class RunManager implements ConditionsListener {
-
- /**
- * The default connection parameters for read-only access to the run database.
- */
- private static ConnectionParameters DEFAULT_CONNECTION_PARAMETERS = new ConnectionParameters("hpsuser",
- "darkphoton", "hps_run_db_v2", "hpsdb.jlab.org");
-
+
+ /* Database connection system properties. */
+ private static final String DATABASE_PROPERTY = "org.hps.rundb.connection.database";
+ private static final String USER_PROPERTY = "org.hps.rundb.connection.user";
+ private static final String PASSWORD_PROPERTY = "org.hps.rundb.connection.password";
+ private static final String HOST_PROPERTY = "org.hps.rundb.connection.host";
+ private static final String PORT_PROPERTY = "org.hps.rundb.connection.port";
+
+ /**
+ * The SQL data source.
+ */
+ private DataSource dataSource;
+
/**
* The singleton instance of the RunManager.
*/
@@ -36,7 +45,7 @@
* Initialize the logger.
*/
private static final Logger LOGGER = Logger.getLogger(RunManager.class.getPackage().getName());
-
+
/**
* Get the global instance of the {@link RunManager}.
* @return the global instance of the {@link RunManager}
@@ -47,17 +56,64 @@
}
return INSTANCE;
}
-
- /**
- * The active database connection.
- */
- private Connection connection;
-
- /**
- * Factory for creating database API objects.
- */
- private final DaoProvider factory;
-
+
+ /**
+ * Create the <code>DataSource</code> to connect to the run database.
+ * @return the <code>DataSource</code> for connecting to the run database
+ */
+ private static DataSource createDataSource() {
+
+ MysqlDataSource dataSource = new MysqlDataSource();
+
+ // Default settings.
+ String user = "hpsuser";
+ String password = "darkphoton";
+ String host = "hpsdb.jlab.org";
+ int port = 3306;
+ String database = "hps_run_db_v2";
+
+ // Settings possibly overridden by Java properties.
+ if (System.getProperty(USER_PROPERTY) != null) {
+ user = System.getProperty(USER_PROPERTY);
+ }
+ if (System.getProperty(PASSWORD_PROPERTY) != null) {
+ password = System.getProperty(PASSWORD_PROPERTY);
+ }
+ if (System.getProperty(HOST_PROPERTY) != null) {
+ host = System.getProperty(HOST_PROPERTY);
+ }
+ if (System.getProperty(PORT_PROPERTY) != null) {
+ port = Integer.parseInt(System.getProperty(PORT_PROPERTY));
+ }
+ if (System.getProperty(DATABASE_PROPERTY) != null) {
+ database = System.getProperty(DATABASE_PROPERTY);
+ }
+
+ dataSource.setUser(user);
+ dataSource.setPassword(password);
+ dataSource.setServerName(host);
+ dataSource.setPort(port);
+ dataSource.setDatabaseName(database);
+ dataSource.setAutoReconnect(true);
+ dataSource.setConnectTimeout(120000);
+ try {
+ dataSource.setLoginTimeout(120);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+
+ StringBuffer sb = new StringBuffer();
+ sb.append("user = " + user + '\n');
+ sb.append("password = ******" + '\n');
+ sb.append("host = " + host + '\n');
+ sb.append("port = " + port + '\n');
+ sb.append("database = " + database + '\n');
+
+ LOGGER.config("Created new rundb data source: " + sb.toString());
+
+ return dataSource;
+ }
+
/**
* The run number; the -1 value indicates that this has not been set externally yet.
*/
@@ -65,40 +121,19 @@
/**
* Class constructor.
- * @param connection the database connection
- */
- public RunManager(final Connection connection) {
- try {
- if (connection.isClosed()) {
- throw new RuntimeException("The connection is already closed and cannot be used.");
- }
- } catch (SQLException e) {
- throw new RuntimeException(e);
- }
- this.connection = connection;
- factory = new DaoProvider(this.connection);
- }
-
- /**
- * Class constructor using default connection parameters.
+ * @param connection the data source with connection information
+ */
+ public RunManager(DataSource dataSource) {
+ this.dataSource = dataSource;
+ }
+
+ /**
+ * Class constructor using default data source.
*/
public RunManager() {
- this(DEFAULT_CONNECTION_PARAMETERS.createConnection());
- }
-
- /**
- * Close the database connection.
- */
- public void closeConnection() {
- try {
- if (!this.connection.isClosed()) {
- this.connection.close();
- }
- } catch (final SQLException e) {
- e.printStackTrace();
- }
- }
-
+ dataSource = createDataSource();
+ }
+
/**
* Load new run information when conditions have changed.
* @param conditionsEvent the event with new conditions information
@@ -109,20 +144,16 @@
}
/**
- * Return the database connection.
- * @return the database connection
- */
- Connection getConnection() {
- return this.connection;
- }
-
- /**
* Get the EPICS data for the current run.
* @param epicsType the type of EPICS data
* @return the EPICS data for the current run
*/
public List<EpicsData> getEpicsData(final EpicsType epicsType) {
- return factory.getEpicsDataDao().getEpicsData(epicsType, this.run);
+ try (Connection connection = dataSource.getConnection()) {
+ return new DaoProvider(connection).getEpicsDataDao().getEpicsData(epicsType, this.run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -131,7 +162,11 @@
* @return the list of EPICS variable definitions
*/
public List<EpicsVariable> getEpicsVariables(final EpicsType epicsType) {
- return factory.getEpicsVariableDao().getEpicsVariables(epicsType);
+ try (Connection connection = dataSource.getConnection()) {
+ return new DaoProvider(connection).getEpicsVariableDao().getEpicsVariables(epicsType);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -139,7 +174,11 @@
* @return the complete list of run numbers
*/
public List<Integer> getRuns() {
- return factory.getRunSummaryDao().getRuns();
+ try (Connection connection = dataSource.getConnection()) {
+ return new DaoProvider(connection).getRunSummaryDao().getRuns();
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -147,7 +186,11 @@
* @return the run summary for the current run
*/
public RunSummary getRunSummary() {
- return factory.getRunSummaryDao().getRunSummary(this.run);
+ try (Connection connection = dataSource.getConnection()) {
+ return new DaoProvider(connection).getRunSummaryDao().getRunSummary(this.run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -155,7 +198,11 @@
* @return the scaler data for the current run
*/
public List<ScalerData> getScalerData() {
- return factory.getScalerDataDao().getScalerData(this.run);
+ try (Connection connection = dataSource.getConnection()) {
+ return new DaoProvider(connection).getScalerDataDao().getScalerData(this.run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -163,7 +210,11 @@
* @return the SVT configuration data
*/
public List<SvtConfigData> getSvtConfigData() {
- return factory.getSvtConfigDao().getSvtConfigs(this.run);
+ try (Connection connection = dataSource.getConnection()) {
+ return new DaoProvider(connection).getSvtConfigDao().getSvtConfigs(this.run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -171,8 +222,13 @@
* @return the DAQ configuration for the run
*/
public DAQConfig getDAQConfig() {
- TriggerConfigData config = factory.getTriggerConfigDao().getTriggerConfig(this.run);
- return config.loadDAQConfig(this.run);
+ try (Connection connection = dataSource.getConnection()) {
+ TriggerConfigData config = new DaoProvider(connection).getTriggerConfigDao().getTriggerConfig(this.run);
+ return config.loadDAQConfig(this.run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+
}
/**
@@ -180,7 +236,11 @@
* @return <code>true</code> if the run exists in the database
*/
public boolean runExists() {
- return factory.getRunSummaryDao().runSummaryExists(this.run);
+ try (Connection connection = dataSource.getConnection()) {
+ return new DaoProvider(connection).getRunSummaryDao().runSummaryExists(this.run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -209,18 +269,22 @@
* @param replaceExisting <code>true</code> to allow an existing run summary to be replaced
*/
void updateRunSummary(RunSummary runSummary, boolean replaceExisting) {
- final RunSummaryDao runSummaryDao = factory.getRunSummaryDao();
- RunManager runManager = new RunManager();
- runManager.setRun(runSummary.getRun());
- if (runManager.runExists()) {
- if (replaceExisting) {
- runSummaryDao.updateRunSummary(runSummary);
+ try (Connection connection = dataSource.getConnection()) {
+ final RunSummaryDao runSummaryDao = new DaoProvider(connection).getRunSummaryDao();
+ RunManager runManager = new RunManager(dataSource);
+ runManager.setRun(runSummary.getRun());
+ if (runManager.runExists()) {
+ if (replaceExisting) {
+ runSummaryDao.updateRunSummary(runSummary);
+ } else {
+ throw new RuntimeException("Run already exists and replacement is not allowed.");
+ }
} else {
- throw new RuntimeException("Run already exists and replacement is not allowed.");
+ runSummaryDao.insertRunSummary(runSummary);
}
- } else {
- runSummaryDao.insertRunSummary(runSummary);
- }
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -229,15 +293,19 @@
* @param replaceExisting <code>true</code> to allow an existing trigger to be replaced
*/
void updateTriggerConfig(TriggerConfigData triggerConfig, boolean replaceExisting) {
- final TriggerConfigDao configDao = factory.getTriggerConfigDao();
- if (configDao.getTriggerConfig(run) != null) {
- if (replaceExisting) {
- configDao.deleteTriggerConfig(run);
- } else {
- throw new RuntimeException("Run already exists and replacement is not allowed.");
+ try (Connection connection = dataSource.getConnection()) {
+ final TriggerConfigDao configDao = new DaoProvider(connection).getTriggerConfigDao();
+ if (configDao.getTriggerConfig(run) != null) {
+ if (replaceExisting) {
+ configDao.deleteTriggerConfig(run);
+ } else {
+ throw new RuntimeException("Run already exists and replacement is not allowed.");
+ }
}
- }
- configDao.insertTriggerConfig(triggerConfig, run);
+ configDao.insertTriggerConfig(triggerConfig, run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
}
/**
@@ -245,8 +313,12 @@
* @param epicsData the EPICS data
*/
void updateEpicsData(List<EpicsData> epicsData) {
- if (epicsData != null && !epicsData.isEmpty()) {
- factory.getEpicsDataDao().insertEpicsData(epicsData, this.run);
+ try (Connection connection = dataSource.getConnection()) {
+ if (epicsData != null && !epicsData.isEmpty()) {
+ new DaoProvider(connection).getEpicsDataDao().insertEpicsData(epicsData, this.run);
+ }
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
}
}
@@ -255,22 +327,30 @@
* @param scalerData the scaler data
*/
void updateScalerData(List<ScalerData> scalerData) {
- if (scalerData != null) {
- factory.getScalerDataDao().insertScalerData(scalerData, this.run);
- }
- }
+ try (Connection connection = dataSource.getConnection()) {
+ if (scalerData != null) {
+ new DaoProvider(connection).getScalerDataDao().insertScalerData(scalerData, this.run);
+ }
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ }
/**
* Delete a run from the database.
* @param run the run number
*/
- void deleteRun() {
- factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_2S, run);
- factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_20S, run);
- factory.getScalerDataDao().deleteScalerData(run);
- factory.getSvtConfigDao().deleteSvtConfigs(run);
- factory.getTriggerConfigDao().deleteTriggerConfig(run);
- factory.getRunSummaryDao().deleteRunSummary(run);
- }
-
+ void deleteRun() {
+ try (Connection connection = dataSource.getConnection()) {
+ DaoProvider factory = new DaoProvider(connection);
+ factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_2S, run);
+ factory.getEpicsDataDao().deleteEpicsData(EpicsType.EPICS_20S, run);
+ factory.getScalerDataDao().deleteScalerData(run);
+ factory.getSvtConfigDao().deleteSvtConfigs(run);
+ factory.getTriggerConfigDao().deleteTriggerConfig(run);
+ factory.getRunSummaryDao().deleteRunSummary(run);
+ } catch (SQLException e) {
+ throw new RuntimeException(e);
+ }
+ }
}
Modified: java/branches/jeremy-dev2/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java
=============================================================================
--- java/branches/jeremy-dev2/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java (original)
+++ java/branches/jeremy-dev2/run-database/src/test/java/org/hps/run/database/RunBuilderTest.java Mon Mar 21 18:04:10 2016
@@ -56,7 +56,7 @@
dataBuilder.build();
// update in database
- RunManager runManager = new RunManager(CONNECTION_PARAMETERS.createConnection());
+ RunManager runManager = new RunManager();
runManager.updateRunSummary(runSummary, true);
runManager.updateTriggerConfig(configBuilder.getTriggerConfigData(), true);
runManager.updateEpicsData(dataBuilder.getEpicsData());
Modified: java/branches/jeremy-dev2/steering-files/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/steering-files/pom.xml (original)
+++ java/branches/jeremy-dev2/steering-files/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/steering-files/</url>
Modified: java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim
=============================================================================
--- java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim (original)
+++ java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/monitoring/EcalLedSequenceStandalone.lcsim Mon Mar 21 18:04:10 2016
@@ -1,43 +1,53 @@
- <lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
- xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
- <execute>
- <driver name="EventMarkerDriver"/>
- <driver name="EcalRunningPedestal"/>
- <driver name="EcalRawConverter" />
- <driver name="LedAnalysisDriver"/>
- <driver name="AidaSaveDriver"/>
- </execute>
- <drivers>
- <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
- <eventInterval>100000</eventInterval>
- </driver>
- <driver name="EcalRunningPedestal" type="org.hps.recon.ecal.EcalRunningPedestalDriver">
- <minLookbackEvents>10</minLookbackEvents>
- <maxLookbackEvents>50</maxLookbackEvents>
- </driver>
- <driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
- <ecalCollectionName>EcalCalHits</ecalCollectionName>
- <useTimestamps>false</useTimestamps>
- <useTruthTime>false</useTruthTime>
- <useRunningPedestal>true</useRunningPedestal>
- <useTimeWalkCorrection>true</useTimeWalkCorrection>
- <nsa>60</nsa> <!-- these are critical since the defaults in software are 100 - 20, as in prod. runs -->
- <nsb>16</nsb>
- </driver>
- <driver name="LedAnalysisDriver" type="org.hps.monitoring.ecal.plots.EcalLedSequenceMonitor">
- <isMonitoringApp>false</isMonitoringApp>
- <doFullAnalysis>false</doFullAnalysis>
- <skipMin>0.2</skipMin>
- <skipInitial>0.05</skipInitial>
- <useRawEnergy>true</useRawEnergy>
- <energyCut>2.0</energyCut>
- <nEventsMin>300</nEventsMin>
- <evnMinDraw>0.</evnMinDraw>
- <evnMaxDraw>80000.</evnMaxDraw>
- <saveTuple>false</saveTuple>
- </driver>
- <driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
- <outputFileName>${outputFile}.LedAnalysis.aida</outputFileName>
- </driver>
- </drivers>
- </lcsim>
+ <lcsim xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
+ xs:noNamespaceSchemaLocation="http://www.lcsim.org/schemas/lcsim/1.0/lcsim.xsd">
+ <execute>
+ <driver name="EventMarkerDriver"/>
+ <driver name="EcalRunningPedestal"/>
+ <driver name="EcalRawConverter" />
+ <driver name="LedAnalysisDriver"/>
+ <driver name="AidaSaveDriver"/>
+ </execute>
+ <drivers>
+ <driver name="EventMarkerDriver" type="org.lcsim.job.EventMarkerDriver">
+ <eventInterval>100000</eventInterval>
+ </driver>
+ <driver name="EcalRunningPedestal" type="org.hps.recon.ecal.EcalRunningPedestalDriver">
+ <minLookbackEvents>10</minLookbackEvents>
+ <maxLookbackEvents>50</maxLookbackEvents>
+ </driver>
+ <driver name="EcalRawConverter" type="org.hps.recon.ecal.EcalRawConverterDriver">
+ <ecalCollectionName>EcalCalHits</ecalCollectionName>
+ <useTimestamps>false</useTimestamps>
+ <useTruthTime>false</useTruthTime>
+ <useRunningPedestal>true</useRunningPedestal>
+ <useTimeWalkCorrection>true</useTimeWalkCorrection>
+ <nsa>60</nsa> <!-- these are critical since the defaults in software are 100 - 20, as in prod. runs -->
+ <nsb>16</nsb>
+ </driver>
+ <!-- <driver name="EcalEventDisplay" type="org.hps.monitoring.ecal.plots.EcalEventDisplay">
+ <inputCollection>EcalCalHits</inputCollection>
+ <inputCollectionRaw>EcalReadoutHits</inputCollectionRaw>
+ <inputClusterCollection>EcalClusters</inputClusterCollection>
+ <pedSamples>20</pedSamples>
+ <maxEch>15.0</maxEch>
+ <minEch>0.005</minEch>
+ <eventRefreshRate>2</eventRefreshRate>
+ </driver>
+ -->
+ <driver name="LedAnalysisDriver" type="org.hps.monitoring.ecal.plots.EcalLedSequenceMonitor">
+ <isMonitoringApp>false</isMonitoringApp>
+ <doFullAnalysis>false</doFullAnalysis>
+ <skipMin>0.2</skipMin>
+ <skipInitial>0.05</skipInitial>
+ <useRawEnergy>true</useRawEnergy>
+ <energyCut>2.0</energyCut>
+ <nEventsMin>300</nEventsMin>
+ <evnMinDraw>0.</evnMinDraw>
+ <evnMaxDraw>80000.</evnMaxDraw>
+ <saveTuple>false</saveTuple>
+ </driver>
+ <driver name="AidaSaveDriver" type="org.lcsim.job.AidaSaveDriver">
+ <outputFileName>${outputFile}.LedAnalysis.aida</outputFileName>
+ </driver>
+ </drivers>
+ </lcsim>
Modified: java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim
=============================================================================
--- java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim (original)
+++ java/branches/jeremy-dev2/steering-files/src/main/resources/org/hps/steering/production/DataQualityRecon.lcsim Mon Mar 21 18:04:10 2016
@@ -19,7 +19,9 @@
<driver name="TrackingMonitoringGBL"/>
<driver name="FinalStateMonitoringGBL"/>
<driver name="V0MonitoringGBL"/>
- <driver name="TridentMonitoringGBL"/>
+ <driver name="TridentMonitoringGBL"/>
+ <driver name="MuonCandidateMonitoring"/>
+ <driver name="MuonCandidateMonitoringGBL"/>
<!-- Singles0 -->
<driver name="EcalMonitoringSingles0"/>
<driver name="EcalMonitoringCorrSingles0"/>
@@ -87,6 +89,14 @@
<driver name="EcalMonitoring" type="org.hps.analysis.dataquality.EcalMonitoring">
<triggerType>all</triggerType>
</driver>
+ <driver name="MuonCandidateMonitoringGBL" type="org.hps.analysis.dataquality.MuonCandidateMonitoring">
+ <triggerType>all</triggerType>
+ <isGBL>true</isGBL>
+ </driver>
+ <driver name="MuonCandidateMonitoring" type="org.hps.analysis.dataquality.MuonCandidateMonitoring">
+ <triggerType>all</triggerType>
+ <isGBL>false</isGBL>
+ </driver>
<driver name="EcalMonitoringCorr" type="org.hps.analysis.dataquality.EcalMonitoring">
<triggerType>all</triggerType>
<clusterCollectionName>EcalClustersCorr</clusterCollectionName>
Modified: java/branches/jeremy-dev2/tracking/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/tracking/pom.xml (original)
+++ java/branches/jeremy-dev2/tracking/pom.xml Mon Mar 21 18:04:10 2016
@@ -8,7 +8,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/tracking/</url>
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/readout/svt/FpgaData.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/readout/svt/FpgaData.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/readout/svt/FpgaData.java Mon Mar 21 18:04:10 2016
@@ -39,8 +39,9 @@
/**
*
- * @param temperature : array containing hybrid temperatures
- * @param tail : word present at the end of a FPGA data set
+ * @param fpgaID FPGA ID
+ * @param data : array containing hybrid temperatures
+ * @param tail : word present at the end of a FPGA data set
*/
public FpgaData(int fpgaID, int[] data, int tail) {
this.fpgaID = fpgaID;
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/HitTimeTrackCheck.java Mon Mar 21 18:04:10 2016
@@ -28,9 +28,15 @@
this.rmsTimeCut = rmsTimeCut;
}
+ public void setDebug(boolean debug) {
+ this.debug = debug;
+ }
+
@Override
public boolean checkSeed(SeedCandidate candidate) {
-// System.out.format("seed with %d hits\n", candidate.getHits().size());
+ if (debug) {
+ System.out.format("%s: seed with %d hits\n", this.getClass().getSimpleName(), candidate.getHits().size());
+ }
int nStrips = 0;
double meanTime = 0;
for (HelicalTrackHit hth : candidate.getHits()) {
@@ -50,15 +56,16 @@
}
// if (nStrips<6) return true;
seedsChecked++;
-// rmsTime = Math.sqrt(rmsTime / nStrips);
-// System.out.format("seed RMS %f on %d hits\n",rmsTime,nStrips);
+ if (debug) {
+ System.out.format("%s: seed RMS %f on %d hits\n", this.getClass().getSimpleName(), Math.sqrt(rmsTime / nStrips), nStrips);
+ }
boolean passCheck = (rmsTime < minTrackHits * rmsTimeCut * rmsTimeCut);
// boolean passCheck = (rmsTime < minTrackHits * rmsTimeCut);
if (passCheck) {
seedsPassed++;
}
if (debug && seedsChecked % 10000 == 0) {
- System.out.format("Checked %d seeds, %d passed (%d failed)\n", seedsChecked, seedsPassed, seedsChecked - seedsPassed);
+ System.out.format("%s: Checked %d seeds, %d passed (%d failed)\n", this.getClass().getSimpleName(), seedsChecked, seedsPassed, seedsChecked - seedsPassed);
}
return passCheck;
@@ -66,7 +73,9 @@
@Override
public boolean checkTrack(SeedTrack track) {
-// System.out.format("track with %d hits\n", track.getTrackerHits().size());
+ if (debug) {
+ System.out.format("%s: track with %d hits\n", this.getClass().getSimpleName(), track.getTrackerHits().size());
+ }
tracksChecked++;
int nStrips = 0;
double meanTime = 0;
@@ -87,13 +96,15 @@
}
rmsTime = Math.sqrt(rmsTime / nStrips);
// rmsTime = rmsTime / nStrips;
-// System.out.format("track RMS %f on %d hits\n", rmsTime, nStrips);
+ if (debug) {
+ System.out.format("%s: track RMS %f on %d hits\n", this.getClass().getSimpleName(), rmsTime, nStrips);
+ }
boolean passCheck = (rmsTime < rmsTimeCut);
if (passCheck) {
tracksPassed++;
}
if (debug && tracksChecked % 100 == 0) {
- System.out.format("Checked %d tracks, %d passed (%d failed)\n", tracksChecked, tracksPassed, tracksChecked - tracksPassed);
+ System.out.format("%s: Checked %d tracks, %d passed (%d failed)\n", this.getClass().getSimpleName(), tracksChecked, tracksPassed, tracksChecked - tracksPassed);
}
return passCheck;
}
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/ShaperLinearFitAlgorithm.java Mon Mar 21 18:04:10 2016
@@ -34,6 +34,7 @@
private final int nPulses;
final double[] amplitudes;
final double[] amplitudeErrors;
+ private double pedestal;
//===> private ChannelConstants channelConstants;
private HpsSiSensor sensor;
private int channel;
@@ -44,6 +45,7 @@
private int nUsedSamples;
private int firstFittedPulse;
private int nFittedPulses;
+ private boolean fitPedestal = false;
private boolean debug = false;
private static final Logger minuitLoggger = Logger.getLogger("org.freehep.math.minuit");
@@ -61,6 +63,18 @@
} else {
minuitLoggger.setLevel(Level.OFF);
}
+ }
+
+ public void setFitPedestal(boolean fitPedestal) {
+ this.fitPedestal = fitPedestal;
+ }
+
+ public boolean fitsPedestal() {
+ return fitPedestal;
+ }
+
+ public double getPedestal() {
+ return pedestal;
}
@Override
@@ -120,7 +134,11 @@
ShapeFitParameters fit = new ShapeFitParameters();
fit.setAmp(amplitudes[i]);
fit.setAmpErr(amplitudeErrors[i]);
- fit.setChiProb(Gamma.regularizedGammaQ(samples.length - 2 * nPulses, chisq));
+ if (fitPedestal) {
+ fit.setChiProb(Gamma.regularizedGammaQ(samples.length - 2 * nPulses - 1, chisq));
+ } else {
+ fit.setChiProb(Gamma.regularizedGammaQ(samples.length - 2 * nPulses, chisq));
+ }
fit.setT0(min.userState().value(i));
@@ -177,15 +195,22 @@
nUsedSamples = split;
//fit only the first pulse
nFittedPulses = 1;
- FunctionMinimum frontFit = doRecursiveFit(fitData);
+ FunctionMinimum frontFit;
+ frontFit = doRecursiveFit(fitData);
if (debug) {
- System.out.format("front fit:\tt0=%f,\tA=%f,\tchisq=%f\n", frontFit.userState().value(0), amplitudes[firstFittedPulse], frontFit.fval());
- }
-
+ if (fitPedestal) {
+ System.out.format("front fit:\tt0=%f,\tA=%f,\tchisq=%f,\tpedestal=%f\n", frontFit.userState().value(0), amplitudes[firstFittedPulse], frontFit.fval(), pedestal);
+ } else {
+ System.out.format("front fit:\tt0=%f,\tA=%f,\tchisq=%f\n", frontFit.userState().value(0), amplitudes[firstFittedPulse], frontFit.fval());
+ }
+ }
//subtract first pulse from fit input
for (int i = 0; i < samples.length; i++) {
//===> fitData[i] -= amplitudes[firstFittedPulse] * getAmplitude(HPSSVTConstants.SAMPLING_INTERVAL * i - frontFit.userState().value(0), channelConstants);
fitData[i] -= amplitudes[firstFittedPulse] * shape.getAmplitudePeakNorm(HPSSVTConstants.SAMPLING_INTERVAL * i - frontFit.userState().value(0));
+ if (fitPedestal) {
+ fitData[i] -= pedestal;
+ }
}
if (debug) {
@@ -201,7 +226,14 @@
//fit the rest of the pulses
firstFittedPulse++;
nFittedPulses = nPulses - firstFittedPulse;
- FunctionMinimum backFit = doRecursiveFit(fitData);
+ FunctionMinimum backFit;
+ if (fitPedestal) {
+ fitPedestal = false;
+ backFit = doRecursiveFit(fitData);
+ fitPedestal = true;
+ } else {
+ backFit = doRecursiveFit(fitData);
+ }
if (debug) {
System.out.format("back fit:\tt0=%f,\tA=%f,\tchisq=%f\n", backFit.userState().value(0), amplitudes[firstFittedPulse], backFit.fval());
@@ -221,7 +253,11 @@
FunctionMinimum combinedFit = minuitFit(combinedGuess);
if (debug) {
- System.out.format("combined fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", combinedFit.userState().value(0), amplitudes[firstFittedPulse], combinedFit.userState().value(1), amplitudes[firstFittedPulse + 1], combinedFit.fval());
+ if (fitPedestal) {
+ System.out.format("combined fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f,\tpedestal=%f\n", combinedFit.userState().value(0), amplitudes[firstFittedPulse], combinedFit.userState().value(1), amplitudes[firstFittedPulse + 1], combinedFit.fval(), pedestal);
+ } else {
+ System.out.format("combined fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", combinedFit.userState().value(0), amplitudes[firstFittedPulse], combinedFit.userState().value(1), amplitudes[firstFittedPulse + 1], combinedFit.fval());
+ }
}
double newchisq = evaluateMinimum(combinedFit);
@@ -234,8 +270,11 @@
// double newchisq = evaluateMinimum(bestFit);
if (debug) {
- System.out.println("new chisq:\t" + bestChisq);
- System.out.format("best fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", bestFit.userState().value(0), amplitudes[firstFittedPulse], bestFit.userState().value(1), amplitudes[firstFittedPulse + 1], bestFit.fval());
+ if (fitPedestal) {
+ System.out.format("best fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f,\tpedestal=%f\n", bestFit.userState().value(0), amplitudes[firstFittedPulse], bestFit.userState().value(1), amplitudes[firstFittedPulse + 1], bestFit.fval(), pedestal);
+ } else {
+ System.out.format("best fit:\tt0=%f,\tA=%f,\tt0=%f,\tA=%f,\tchisq=%f\n", bestFit.userState().value(0), amplitudes[firstFittedPulse], bestFit.userState().value(1), amplitudes[firstFittedPulse + 1], bestFit.fval());
+ }
}
return bestFit;
}
@@ -322,7 +361,8 @@
if (times.length != nFittedPulses) {
throw new RuntimeException("wrong number of parameters in doLinFit");
}
- RealMatrix sc_mat = new Array2DRowRealMatrix(nFittedPulses, nUsedSamples);
+ int nAmplitudes = fitPedestal ? nFittedPulses + 1 : nFittedPulses;
+ RealMatrix sc_mat = new Array2DRowRealMatrix(nAmplitudes, nUsedSamples);
RealVector y_vec = new ArrayRealVector(nUsedSamples);
RealVector var_vec = new ArrayRealVector(nUsedSamples);
@@ -330,6 +370,9 @@
for (int i = 0; i < nFittedPulses; i++) {
//===> sc_mat.setEntry(i, j, getAmplitude(HPSSVTConstants.SAMPLING_INTERVAL * (firstUsedSample + j) - times[i], channelConstants) / sigma[firstUsedSample + j]);
sc_mat.setEntry(i, j, shape.getAmplitudePeakNorm(HPSSVTConstants.SAMPLING_INTERVAL * (firstUsedSample + j) - times[i]) / sigma[firstUsedSample + j]);
+ }
+ if (fitPedestal) {
+ sc_mat.setEntry(nFittedPulses, j, 1.0 / sigma[firstUsedSample + j]);
}
y_vec.setEntry(j, y[firstUsedSample + j] / sigma[firstUsedSample + j]);
var_vec.setEntry(j, sigma[firstUsedSample + j] * sigma[firstUsedSample + j]);
@@ -344,7 +387,7 @@
a_solver = a_cholesky.getSolver();
solved_amplitudes = a_solver.solve(a_vec);
amplitude_err = a_solver.solve(sc_mat.operate(var_vec));
- if (solved_amplitudes.getMinValue() < 0) {
+ if (solved_amplitudes.getSubVector(0, nFittedPulses).getMinValue() < 0) {
goodFit = false;
}
} catch (NonPositiveDefiniteMatrixException e) {
@@ -352,8 +395,8 @@
}
if (!goodFit) {
- solved_amplitudes = new ArrayRealVector(nFittedPulses, 0.0);
- amplitude_err = new ArrayRealVector(nFittedPulses, Double.POSITIVE_INFINITY);
+ solved_amplitudes = new ArrayRealVector(nAmplitudes, 0.0);
+ amplitude_err = new ArrayRealVector(nAmplitudes, Double.POSITIVE_INFINITY);
}
double chisq = y_vec.subtract(sc_mat.preMultiply(solved_amplitudes)).getNorm();
@@ -361,6 +404,9 @@
for (int i = 0; i < nFittedPulses; i++) {
amplitudes[firstFittedPulse + i] = solved_amplitudes.getEntry(i);
amplitudeErrors[firstFittedPulse + i] = Math.sqrt(amplitude_err.getEntry(i));
+ }
+ if (fitPedestal) {
+ pedestal = solved_amplitudes.getEntry(nFittedPulses);
}
return chisq;
}
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/StrategyType.java Mon Mar 21 18:04:10 2016
@@ -4,7 +4,7 @@
import java.util.Map;
/**
- * Enum constants for different {@link Track}s based on what tracking
+ * Enum constants for different {@link org.lcsim.event.Track} objects based on what tracking
* strategy was used. The type is defined by comparing the tracking strategy
* name to the name of all the enum constants.
*
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackDataDriver.java Mon Mar 21 18:04:10 2016
@@ -23,8 +23,8 @@
import org.lcsim.util.Driver;
/**
- * Driver used to persist additional {@link Track} information via a
- * {@link GenericObject}.
+ * Driver used to persist additional {@link org.lcsim.event.Track} information via a
+ * {@link org.lcsim.event.GenericObject} collection.
*
* @author Omar Moreno, UCSC
* @author Sho Uemura, SLAC
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackQualityData.java Mon Mar 21 18:04:10 2016
@@ -17,10 +17,7 @@
private final double[] doubles;
/**
- * Default Ctor
- *
- * @param trackerVolume : The SVT volume to which the track used to
- * calculate the residuals corresponds to.
+ * Default Ctor
*/
public TrackQualityData() {
doubles = new double[2];
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackUtils.java Mon Mar 21 18:04:10 2016
@@ -78,7 +78,7 @@
*
* @param track
* @param x
- * @return
+ * @return the position along the x-axis
*/
public static Hep3Vector extrapolateHelixToXPlane(Track track, double x) {
return extrapolateHelixToXPlane(getHTF(track), x);
@@ -153,9 +153,9 @@
/**
* Extrapolate helix to a position along the x-axis. Re-use HelixUtils.
*
- * @param track
+ * @param htf
* @param x
- * @return
+ * @return the position along the x-axis
*/
public static Hep3Vector extrapolateHelixToXPlane(HelicalTrackFit htf, double x) {
double s = HelixUtils.PathToXPlane(htf, x, 0., 0).get(0);
@@ -346,7 +346,7 @@
*
* @param track - to be extrapolated
* @param z
- * @return
+ * @return extrapolated position
*/
public static Hep3Vector extrapolateTrack(Track track, double z) {
return extrapolateTrack(track.getTrackStates().get(0),z);
@@ -357,7 +357,7 @@
*
* @param track - to be extrapolated
* @param z
- * @return
+ * @return extrapolated position
*/
public static Hep3Vector extrapolateTrack(TrackState track, double z) {
@@ -393,10 +393,8 @@
/**
* Extrapolate track to given position, using dipole position from geometry.
*
- * @param helix - to be extrapolated
- * @param track - position along the x-axis of the helix in lcsim
- * coordiantes
- * @return
+ * @param track - position along the x-axis of the helix in lcsim coordinates
+ * @return extrapolated position
*/
public static Hep3Vector extrapolateTrack(Track track, double z, Detector detector) {
@@ -439,7 +437,7 @@
*
* @param helix - to be extrapolated
* @param z - position along the x-axis of the helix in lcsim coordiantes
- * @return
+ * @return the extrapolated position
*/
public static Hep3Vector extrapolateTrack(HelicalTrackFit helix, double z) {
SeedTrack trk = new SeedTrack();
@@ -1161,7 +1159,7 @@
* @param trk
* @param hitToStrips
* @param hitToRotated
- * @return
+ * @return isolations for all 12 strip layers
*/
public static Double[] getIsolations(Track trk, RelationalTable hitToStrips, RelationalTable hitToRotated) {
Double[] isolations = new Double[12];
@@ -1333,7 +1331,7 @@
* @param r0
* @param q
* @param B
- * @return
+ * @return the created trajectory
*/
public static Trajectory getTrajectory(Hep3Vector p0, org.lcsim.spacegeom.SpacePoint r0, double q, double B) {
SpaceVector p = new CartesianVector(p0.v());
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/TrackerReconDriver.java Mon Mar 21 18:04:10 2016
@@ -35,7 +35,7 @@
public final class TrackerReconDriver extends Driver {
private static final Logger LOGGER = Logger.getLogger(TrackerReconDriver.class.getPackage().getName());
-
+
// Debug flag.
private boolean debug = false;
// Tracks found across all events.
@@ -143,10 +143,8 @@
// Get B-field Y with no sign. Seed Tracker doesn't like signed B-field components.
// FIXME Is this always right?
-// this.bfield = Math.abs((detector.getFieldMap().getField(new BasicHep3Vector(0, 0, 0)).y()));
- double zvalInTracker=500.0;//50cm...about the middle
- Hep3Vector fieldInTracker=detector.getFieldMap().getField(new BasicHep3Vector(0, 0, zvalInTracker));
- LOGGER.config("fieldInTracker at "+zvalInTracker+": Bx = "+fieldInTracker.x()+"; By = "+fieldInTracker.y()+"; Bz = "+fieldInTracker.z());
+ Hep3Vector fieldInTracker = TrackUtils.getBField(detector);
+ LOGGER.config("fieldInTracker: Bx = " + fieldInTracker.x() + "; By = " + fieldInTracker.y() + "; Bz = " + fieldInTracker.z());
this.bfield = Math.abs(fieldInTracker.y());
LOGGER.config(String.format("%s: Set B-field to %.6f\n", this.getClass().getSimpleName(), this.bfield));
@@ -184,7 +182,9 @@
add(stFinal);
if (rmsTimeCut > 0) {
- stFinal.setTrackCheck(new HitTimeTrackCheck(rmsTimeCut));
+ HitTimeTrackCheck timeCheck = new HitTimeTrackCheck(rmsTimeCut);
+ timeCheck.setDebug(debug);
+ stFinal.setTrackCheck(timeCheck);
}
}
@@ -301,7 +301,7 @@
public void endOfData() {
if (debug) {
System.out.println("-------------------------------------------");
- System.out.println(this.getName() + " found " + ntracks + " tracks in " + nevents + " events which is " + ((double) ntracks / (double) nevents) + " tracks per event.");
+ System.out.println(this.getName() + " with strategy " + strategyResource + " found " + ntracks + " tracks in " + nevents + " events which is " + ((double) ntracks / (double) nevents) + " tracks per event.");
}
}
}
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/WTrack.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/WTrack.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/WTrack.java Mon Mar 21 18:04:10 2016
@@ -15,10 +15,9 @@
* Track representation based on paper
* Paul Avery, CBX 98-39, June 9, 1998
*
- * Used primarily for the algorithm to intersect a helix with a generic plane in space.
+ * Used primarily for the algorithm to intersect a helix with a generic plane in space.
*
* @author phansson <[log in to unmask]>
-
*/
public class WTrack {
@@ -29,7 +28,6 @@
private boolean _debug = false;
private final int max_iterations_intercept = 10;
private final double epsilon_intercept = 1e-4;
-
/**
* Constructor. Assumes that b-field is in detector z direction.
@@ -189,7 +187,7 @@
* Get point on helix at path length s in arbitrary oriented, constant magnetic field with unit vector h
* @param s - path length
* @param h - magnetic field unit vector
- * @return
+ * @return get a 3D point along the helix
*/
private Hep3Vector getPointOnHelix(double s, Hep3Vector h) {
WTrack track = this;
@@ -266,11 +264,9 @@
* @param xp point on the plane
* @param eta unit vector of the plane
* @param h unit vector of magnetic field
- * @return
+ * @return the intersection point of the helix with the plane
*/
public Hep3Vector getHelixAndPlaneIntercept(Hep3Vector xp, Hep3Vector eta, Hep3Vector h) {
-
-
int iteration = 1;
double s_total = 0.;
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/axial/HelicalTrack2DHit.java Mon Mar 21 18:04:10 2016
@@ -11,8 +11,8 @@
* This class is explicitly for HPS where the length of the
* sensors are (mostly) along the detector
* y-dimension ( == HelicalTrackFit x-dimension);
+ * Copied/Modified from org.lcsim.recon.tracking.helicaltrack.HelicalTrack2DHit.java
* @author Matt Graham <[log in to unmask]>
- * Copied/Modified from org.lcsim.recon.tracking.helicaltrack.HelicalTrack2DHit.java
*/
public class HelicalTrack2DHit extends HelicalTrackHit {
private double _axmin;//min value along the bend-direction..
@@ -27,8 +27,6 @@
* @param dEdx deposited energy
* @param time hit time
* @param rawhits list of raw hits
- * @param axmin minimum z for the strip
- * @param axmax maximum z for the strip
* @param detname detector name
* @param layer layer number
* @param beflag
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/FittedGblTrajectory.java Mon Mar 21 18:04:10 2016
@@ -88,7 +88,7 @@
/**
* Find the index (or label) of the GBL point on the trajectory from the {@link GBLPOINT}.
* @param point
- * @return
+ * @return the index of the GBL point on the trajectory from the enum
*/
public int getPointIndex(GBLPOINT point) {
int gblPointIndex;
@@ -196,7 +196,7 @@
* @param htf - helix to be corrected
* @param point - {@link GBLPOINT} on the trajectory
* @param bfield - magnitude of B-field.
- * @return
+ * @return the corrected perigee parameters and covariance matrix
*/
public Pair<double[], SymmetricMatrix> getCorrectedPerigeeParameters(HelicalTrackFit htf, GBLPOINT point, double bfield) {
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLStripClusterData.java Mon Mar 21 18:04:10 2016
@@ -81,7 +81,7 @@
}
/**
- * @param set track id to val
+ * @param val set track id to val
*/
public void setId(int val) {
bank_int[GBLINT.ID] = val;
@@ -193,7 +193,7 @@
/**
* Set track direction at this cluster
*
- * @param tDir
+ * @param v the track direction
*/
public void setTrackDir(Hep3Vector v) {
bank_double[GBLDOUBLE.TDIRX] = v.x();
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GBLTrackData.java Mon Mar 21 18:04:10 2016
@@ -60,7 +60,7 @@
}
/**
- * @param set track id to val
+ * @param val track ID value
*/
public void setTrackId(int val) {
bank_int[GBLINT.ID] = val;
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/GblUtils.java Mon Mar 21 18:04:10 2016
@@ -316,17 +316,13 @@
throw new UnsupportedOperationException("Should not happen. This problem is only solved with the MaterialSupervisor.");
}
}
-
-
-
-
/**
* Calculate the Jacobian from Curvilinear to Perigee frame.
* @param helicalTrackFit - original helix
* @param helicalTrackFitAtIPCorrected - corrected helix at this point
* @param bfield - magnitude of B-field
- * @return
+ * @return the Jacobian matrix from Curvilinear to Perigee frame
*/
public static Matrix getCLToPerigeeJacobian(HelicalTrackFit helicalTrackFit, HpsHelicalTrackFit helicalTrackFitAtIPCorrected, double bfield) {
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/HelicalTrackStripGbl.java Mon Mar 21 18:04:10 2016
@@ -13,10 +13,8 @@
import org.lcsim.fit.helicaltrack.HelicalTrackStrip;
/**
- * Encapsulates the {@HelicalTrackStrip} to make sure that the local unit vectors are
+ * Encapsulates the {@link org.lcsim.fit.helicaltrack.HelicalTrackStrip} to make sure that the local unit vectors are
* coming from the underlying geometry.
- *
- * I think the base calss should change but whatever.
*
* @author Per Hansson Adrian <[log in to unmask]>
*
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/gbl/MakeGblTracks.java Mon Mar 21 18:04:10 2016
@@ -148,7 +148,7 @@
* @param _scattering - estimation of the multiple scattering {@link MultipleScattering}.
* @param bfield - magnitude of B-field.
* @param debug - debug flag.
- * @return
+ * @return the fitted GBL trajectory
*/
public static FittedGblTrajectory doGBLFit(HelicalTrackFit htf, List<TrackerHit> stripHits, MultipleScattering _scattering, double bfield, int debug) {
List<GBLStripClusterData> stripData = makeStripData(htf, stripHits, _scattering, bfield, debug);
@@ -165,7 +165,7 @@
* @param _scattering
* @param _B
* @param _debug
- * @return
+ * @return the list of GBL strip cluster data
*/
public static List<GBLStripClusterData> makeStripData(HelicalTrackFit htf, List<TrackerHit> stripHits, MultipleScattering _scattering, double _B, int _debug) {
List<GBLStripClusterData> stripClusterDataList = new ArrayList<GBLStripClusterData>();
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/straight/STUtils.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/straight/STUtils.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/recon/tracking/straight/STUtils.java Mon Mar 21 18:04:10 2016
@@ -406,7 +406,7 @@
* @param origin
* @param strip1
* @param strip2
- * @return
+ * @return the stereo hit position
*/
static Hep3Vector getStereoHitPositionFromReference(Hep3Vector origin, SiTrackerHitStrip1D strip1, SiTrackerHitStrip1D strip2) {
SiTrackerHitStrip1D s1 = strip1;
@@ -499,7 +499,6 @@
* @param origin
* @param strip1
* @param strip2
- * @return
*/
static Hep3Vector getPosition(Hep3Vector t, SiTrackerHitStrip1D strip1, SiTrackerHitStrip1D strip2) {
SiTrackerHitStrip1D s1 = strip1;
@@ -592,7 +591,8 @@
}
/**
- * Fit the {@link STStereoTrack} track with the supplied {@link STTrackFitter} and add the fit to the track.
+ * Fit the {@link STUtils.STStereoTrack} track with the supplied {@link STUtils.STTrackFitter}
+ * and add the fit to the track.
* @param regressionFitter
* @param track
*/
@@ -741,7 +741,7 @@
* Calculate the multiple scattering angle for a given momentum and thickness
* @param p
* @param radlength
- * @return
+ * @return the multiple scattering angle for a given momentum and thickness
*/
public static double msangle(double p, double radlength) {
double angle = (0.0136 / p) * Math.sqrt(radlength) * (1.0 + 0.038 * Math.log(radlength));
@@ -781,10 +781,11 @@
}
/**
- * Finds point of intercept between a {@link STStereoTrack} and a sensor obtained from a {@link SiTrackerHitStrip1D}.
+ * Finds point of intercept between a {@link STUtils.STStereoTrack} and a sensor obtained from a
+ * {@link org.lcsim.recon.tracking.digitization.sisim.SiTrackerHitStrip1D}.
* @param strip
* @param track
- * @return point of intercept.
+ * @return the point of intercept
*/
private static Hep3Vector getLinePlaneIntercept(SiTrackerHitStrip1D strip, STStereoTrack track) {
// line description
@@ -797,14 +798,13 @@
Hep3Vector trkpos = getLinePlaneIntercept(l, l0, p0, n);
logger.finest("\ntrkpos " + trkpos.toString() + "\n l " + l.toString() + "\n l0 " + l0.toString() + "\n p0 " + p0.toString() + "\n n " + n.toString());
return trkpos;
- }
-
+ }
/**
* Calculate the residual (measured - predicted) for this hit in the measurement frame.
* @param strip
* @param track
- * @return
+ * @return the residual (measured - predicted) for the hit
*/
protected static double getUResidual(SiTrackerHitStrip1D strip, STStereoTrack track) {
@@ -864,7 +864,7 @@
* Path length to this point along the {@link STStereoTrack}.
* @param z
* @param track
- * @return
+ * @return path length to the point along the track
*/
private static double getPathLength(double z, STStereoTrack track) {
final double C = z / track.getDirection().z();
@@ -876,7 +876,7 @@
/**
- * Get a vector of track parameters for a {@link STStereoTrack} in the tracking frame.
+ * Get a vector of track parameters for a {@link STUtils.STStereoTrack} in the tracking frame.
* @param track
* @return array of intercept YX, intercept ZX, slope YX and slope ZX.
*/
@@ -1037,7 +1037,7 @@
}
/**
- * Get curvilinear track parameters for this {@link STStereoTrack}.
+ * Get curvilinear track parameters for this {@link STUtils.STStereoTrack}.
* @param track
* @return array of track parameters
*/
@@ -1089,14 +1089,5 @@
double clPars[] = new double[]{1.0/p, lambda, phi, xT, yT,};
return clPars;
- }
-
-
-
-
-
-
-
-
-
+ }
}
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/BuildCompact.java Mon Mar 21 18:04:10 2016
@@ -49,8 +49,7 @@
* Class building a new compact.xml detector based on MillepedeII input
* corrections.
*
- * @author Per Ola Hansson Adrian <[log in to unmask]>
- * @date January 15, 2014
+ * @author Per Ola Hansson Adrian <[log in to unmask]>
*/
public class BuildCompact {
Modified: java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java
=============================================================================
--- java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java (original)
+++ java/branches/jeremy-dev2/tracking/src/main/java/org/hps/svt/alignment/HPSStrips.java Mon Mar 21 18:04:10 2016
@@ -255,7 +255,7 @@
* Capacitance for a particular cell. Units are pF.
*
* @param cell_id
- * @return
+ * @return the capacitance for the cell (pF)
*/
public double getCapacitance(int cell_id) // capacitance in pF
{
@@ -266,7 +266,7 @@
* Nominal capacitance used for throwing random noise in the sensor.
* Calculated using middle strip. Units are pF.
*
- * @return
+ * @return the nominal capacitance used for noise generation in the sensor (pF)
*/
public double getCapacitance() {
return getCapacitance(getNCells(0) / 2);
Modified: java/branches/jeremy-dev2/users/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/users/pom.xml (original)
+++ java/branches/jeremy-dev2/users/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/users/</url>
Modified: java/branches/jeremy-dev2/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java
=============================================================================
--- java/branches/jeremy-dev2/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java (original)
+++ java/branches/jeremy-dev2/users/src/main/java/org/hps/users/meeg/SvtChargeIntegrator.java Mon Mar 21 18:04:10 2016
@@ -6,17 +6,19 @@
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
@@ -49,6 +51,7 @@
options.addOption(new Option("r", false, "use per-run CSV log file (default is per-file)"));
options.addOption(new Option("t", false, "use TI timestamp instead of Unix time (higher precision, but requires TI time offset in run DB)"));
options.addOption(new Option("c", false, "get TI time offset from CSV log file instead of run DB"));
+ options.addOption(new Option("e", true, "header error file"));
final CommandLineParser parser = new PosixParser();
CommandLine cl = null;
@@ -62,6 +65,26 @@
boolean useTI = cl.hasOption("t");
boolean useCrawlerTI = cl.hasOption("c");
+ Map<Integer, Long> runErrorMap = new HashMap<Integer, Long>();
+ if (cl.hasOption("e")) {
+ try {
+ BufferedReader br = new BufferedReader(new FileReader(cl.getOptionValue("e")));
+ String line;
+ System.err.println("header error file header: " + br.readLine()); //discard the first line
+ while ((line = br.readLine()) != null) {
+ String arr[] = line.split(" +");
+ int run = Integer.parseInt(arr[1]);
+ long errorTime = Long.parseLong(arr[4]);
+ runErrorMap.put(run, errorTime);
+// System.out.format("%d %d\n", run, errorTime);
+ }
+ } catch (FileNotFoundException ex) {
+ Logger.getLogger(SvtChargeIntegrator.class.getName()).log(Level.SEVERE, null, ex);
+ } catch (IOException ex) {
+ Logger.getLogger(SvtChargeIntegrator.class.getName()).log(Level.SEVERE, null, ex);
+ }
+ }
+
if (cl.getArgs().length != 2) {
printUsage(options);
return;
@@ -89,9 +112,9 @@
String line;
System.err.println("myaData header: " + br.readLine()); //discard the first line
if (perRun) {
- System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgoodQ\tgoodQ_withbias\tgoodQ_atnom");
+ System.out.println("run_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
} else {
- System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgoodQ\tgoodQ_withbias\tgoodQ_atnom");
+ System.out.println("run_num\tfile_num\tnominal_position\tnEvents\ttotalQ\ttotalQ_withbias\ttotalQ_atnom\ttotalQ_noerror\tgatedQ\tgatedQ_withbias\tgatedQ_atnom\tgatedQ_noerror\tgoodQ\tgoodQ_withbias\tgoodQ_atnom\tgoodQ_noerror");
}
int currentRun = 0;
@@ -129,9 +152,6 @@
if (tiTimeOffset == 0) {
continue;
}
- if (tiTimeOffset == 0) {
- continue;
- }
}
try {
@@ -212,25 +232,40 @@
if (firstTI == 0 || lastTI == 0) {
continue;
}
- startDate = new Date((long) ((firstTI + tiTimeOffset) / 1e6));
- endDate = new Date((long) ((lastTI + tiTimeOffset) / 1e6));
+ startDate = new Date((firstTI + tiTimeOffset) / 1000000);
+ endDate = new Date((lastTI + tiTimeOffset) / 1000000);
} else {
if (firstTime == 0 || lastTime == 0) {
continue;
}
startDate = new Date(firstTime * 1000);
endDate = new Date(lastTime * 1000);
+ }
+
+ Long errorTime = runErrorMap.get(runNum);
+ Date errorDate = null;
+ if (errorTime != null) {
+ errorDate = new Date(errorTime / 1000000);
+ boolean isGood = Math.abs(errorDate.getTime() - startDate.getTime()) < 10 * 60 * 60 * 1000; //10 hours
+ if (!isGood && useTI) {
+ errorDate = new Date((errorTime + tiTimeOffset) / 1000000);
+// boolean isPlusOffsetGood = Math.abs(errorDatePlusOffset.getTime() - startDate.getTime()) < 10 * 60 * 60 * 1000; //10 hours
+// System.out.format("%d, %d, %d: %s (good: %b), %s (good: %b)\n", runNum, errorTime, tiTimeOffset, errorDate, isGood, errorDatePlusOffset, isPlusOffsetGood);
+ }
}
double totalCharge = 0;
double totalChargeWithBias = 0;
double totalChargeWithBiasAtNominal = 0;
+ double totalChargeWithBiasAtNominalNoError = 0;
double totalGatedCharge = 0;
double totalGatedChargeWithBias = 0;
double totalGatedChargeWithBiasAtNominal = 0;
+ double totalGatedChargeWithBiasAtNominalNoError = 0;
double totalGoodCharge = 0;
double totalGoodChargeWithBias = 0;
double totalGoodChargeWithBiasAtNominal = 0;
+ double totalGoodChargeWithBiasAtNominalNoError = 0;
br.mark(1000);
while ((line = br.readLine()) != null) {
@@ -289,14 +324,23 @@
long dtStart = Math.max(startDate.getTime(), lastDate.getTime());
long dtEnd = Math.min(date.getTime(), endDate.getTime());
double dt = (dtEnd - dtStart) / 1000.0;
+ double errorDt = 0;
if (biasConstant != null) {
long biasStart = Math.max(dtStart, biasConstant.getStart());
long biasEnd = Math.min(dtEnd, biasConstant.getEnd());
- biasDt = (biasEnd - biasStart) / 1000.0;
+ biasDt = Math.max(0, biasEnd - biasStart) / 1000.0;
if (positionConstant != null) {
long positionStart = Math.max(biasStart, positionConstant.getStart());
long positionEnd = Math.min(biasEnd, positionConstant.getEnd());
- positionDt = (positionEnd - positionStart) / 1000.0;
+ positionDt = Math.max(0, positionEnd - positionStart) / 1000.0;
+
+ long errorEnd = positionStart;
+ if (errorDate == null) {
+ errorEnd = positionEnd;
+ } else if (errorDate.getTime() > dtStart) {
+ errorEnd = Math.min(positionEnd, errorDate.getTime());
+ }
+ errorDt = Math.max(0, errorEnd - positionStart) / 1000.0;
}
}
// System.out.format("start %d end %d date %d lastDate %d current %f dt %f\n", startDate.getTime(), endDate.getTime(), date.getTime(), lastDate.getTime(), current, dt);
@@ -311,6 +355,10 @@
totalChargeWithBiasAtNominal += positionDt * current;
totalGatedChargeWithBiasAtNominal += positionDt * current * livetime;
totalGoodChargeWithBiasAtNominal += positionDt * current * livetime * efficiency;
+
+ totalChargeWithBiasAtNominalNoError += errorDt * current;
+ totalGatedChargeWithBiasAtNominalNoError += errorDt * current * livetime;
+ totalGoodChargeWithBiasAtNominalNoError += errorDt * current * livetime * efficiency;
}
}
}
@@ -326,11 +374,11 @@
}
if (perRun) {
int nEvents = Integer.parseInt(record.get(9));
- System.out.format("%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal, totalGoodCharge, totalGoodChargeWithBias, totalGoodChargeWithBiasAtNominal);
+ System.out.format("%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalChargeWithBiasAtNominalNoError, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal, totalGatedChargeWithBiasAtNominalNoError, totalGoodCharge, totalGoodChargeWithBias, totalGoodChargeWithBiasAtNominal, totalGoodChargeWithBiasAtNominalNoError);
} else {
int fileNum = Integer.parseInt(record.get(1));
int nEvents = Integer.parseInt(record.get(2));
- System.out.format("%d\t%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, fileNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal, totalGoodCharge, totalGoodChargeWithBias, totalGoodChargeWithBiasAtNominal);
+ System.out.format("%d\t%d\t%s\t%d\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\n", runNum, fileNum, nominalPosition, nEvents, totalCharge, totalChargeWithBias, totalChargeWithBiasAtNominal, totalChargeWithBiasAtNominalNoError, totalGatedCharge, totalGatedChargeWithBias, totalGatedChargeWithBiasAtNominal, totalGatedChargeWithBiasAtNominalNoError, totalGoodCharge, totalGoodChargeWithBias, totalGoodChargeWithBiasAtNominal, totalGoodChargeWithBiasAtNominalNoError);
}
}
} catch (Exception ex) {
Modified: java/branches/jeremy-dev2/util/pom.xml
=============================================================================
--- java/branches/jeremy-dev2/util/pom.xml (original)
+++ java/branches/jeremy-dev2/util/pom.xml Mon Mar 21 18:04:10 2016
@@ -7,7 +7,7 @@
<groupId>org.hps</groupId>
<artifactId>hps-parent</artifactId>
<relativePath>../parent/pom.xml</relativePath>
- <version>3.7-SNAPSHOT</version>
+ <version>3.8-SNAPSHOT</version>
</parent>
<scm>
<url>http://java.freehep.org/svn/repos/hps/list/java/trunk/util/</url>
Modified: java/branches/jeremy-dev2/util/src/main/java/org/hps/util/MergeBunches.java
=============================================================================
--- java/branches/jeremy-dev2/util/src/main/java/org/hps/util/MergeBunches.java (original)
+++ java/branches/jeremy-dev2/util/src/main/java/org/hps/util/MergeBunches.java Mon Mar 21 18:04:10 2016
@@ -454,7 +454,6 @@
* Copies an mc particle and stores it together with the copy in a map.
* Adds it to the list of mc particles as well as the overlay mc particles.
* Also copies and keeps all ancestors.
- * @param event
* @param particle
*/
protected void addOverlayMcParticle(MCParticle particle) {
|